[ 542.016308] env[68279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 542.016706] env[68279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 542.016706] env[68279]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68279) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 542.017051] env[68279]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 542.114697] env[68279]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68279) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 542.124766] env[68279]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68279) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 542.167678] env[68279]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 542.727238] env[68279]: INFO nova.virt.driver [None req-c0632c4e-b2ac-43ab-a79b-3863848b7848 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 542.797286] env[68279]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 542.797475] env[68279]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 542.797542] env[68279]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68279) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 545.812326] env[68279]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-9943b7f6-68ed-4e66-813b-dd711b7830e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.829562] env[68279]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68279) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 545.829766] env[68279]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1568973b-f375-4a87-ab68-886482fc005e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.865643] env[68279]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 1ff0c.
[ 545.865933] env[68279]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.068s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 545.866406] env[68279]: INFO nova.virt.vmwareapi.driver [None req-c0632c4e-b2ac-43ab-a79b-3863848b7848 None None] VMware vCenter version: 7.0.3
[ 545.869955] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4987ba67-248a-4139-9f68-77234ef1b5d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.888401] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0960dd6-fa4c-4dea-b7f0-f1c106654060 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.894694] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30565ca3-81bd-4f40-8c8a-c59db4901aa8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.901654] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b51932-61e1-4174-ac42-ce19d2727328 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.915281] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4c68c7-cb86-4fa6-9a44-e7b6193f17da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.921975] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6595192b-bfec-449c-b8cc-2411d53055f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.953330] env[68279]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-bb67e9a1-a3de-4b2b-a53c-4b4122a21d18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.959701] env[68279]: DEBUG nova.virt.vmwareapi.driver [None req-c0632c4e-b2ac-43ab-a79b-3863848b7848 None None] Extension org.openstack.compute already exists. {{(pid=68279) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 545.962356] env[68279]: INFO nova.compute.provider_config [None req-c0632c4e-b2ac-43ab-a79b-3863848b7848 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 546.465528] env[68279]: DEBUG nova.context [None req-c0632c4e-b2ac-43ab-a79b-3863848b7848 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4538920e-925b-4b05-8481-b1e8e6d7e1c8(cell1) {{(pid=68279) load_cells /opt/stack/nova/nova/context.py:464}}
[ 546.467622] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 546.467999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 546.468811] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 546.469308] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Acquiring lock "4538920e-925b-4b05-8481-b1e8e6d7e1c8" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 546.469505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Lock "4538920e-925b-4b05-8481-b1e8e6d7e1c8" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 546.470557] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Lock "4538920e-925b-4b05-8481-b1e8e6d7e1c8" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 546.491296] env[68279]: INFO dbcounter [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Registered counter for database nova_cell0
[ 546.500294] env[68279]: INFO dbcounter [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Registered counter for database nova_cell1
[ 546.958927] env[68279]: DEBUG oslo_db.sqlalchemy.engines [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68279) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 546.959678] env[68279]: DEBUG oslo_db.sqlalchemy.engines [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68279) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 546.964287] env[68279]: ERROR nova.db.main.api [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 546.964287] env[68279]: result = function(*args, **kwargs)
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 546.964287] env[68279]: return func(*args, **kwargs)
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 546.964287] env[68279]: result = fn(*args, **kwargs)
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 546.964287] env[68279]: return f(*args, **kwargs)
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/objects/service.py", line 560, in _db_service_get_minimum_version
[ 546.964287] env[68279]: return db.service_get_minimum_version(context, binaries)
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 546.964287] env[68279]: _check_db_access()
[ 546.964287] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 546.964287] env[68279]: stacktrace = ''.join(traceback.format_stack())
[ 546.964287] env[68279]:
[ 546.965299] env[68279]: ERROR nova.db.main.api [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 546.965299] env[68279]: result = function(*args, **kwargs)
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 546.965299] env[68279]: return func(*args, **kwargs)
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 546.965299] env[68279]: result = fn(*args, **kwargs)
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 546.965299] env[68279]: return f(*args, **kwargs)
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/objects/service.py", line 560, in _db_service_get_minimum_version
[ 546.965299] env[68279]: return db.service_get_minimum_version(context, binaries)
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 546.965299] env[68279]: _check_db_access()
[ 546.965299] env[68279]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 546.965299] env[68279]: stacktrace = ''.join(traceback.format_stack())
[ 546.965299] env[68279]:
[ 546.965704] env[68279]: WARNING nova.objects.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 546.965868] env[68279]: WARNING nova.objects.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Failed to get minimum service version for cell 4538920e-925b-4b05-8481-b1e8e6d7e1c8
[ 546.966334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Acquiring lock "singleton_lock" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 546.966491] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Acquired lock "singleton_lock" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
546.966728] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Releasing lock "singleton_lock" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 546.967058] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Full set of CONF: {{(pid=68279) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 546.967205] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ******************************************************************************** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 546.967331] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] Configuration options gathered from: {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 546.967463] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 546.967654] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 546.967780] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ================================================================================ {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 546.968035] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] allow_resize_to_same_host = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.968222] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] arq_binding_timeout = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.968356] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] backdoor_port = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.968483] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] backdoor_socket = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.968649] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] block_device_allocate_retries = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.968831] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] block_device_allocate_retries_interval = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.969033] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cert = self.pem {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.969210] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.969383] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute_monitors = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.969779] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] config_dir = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.969968] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] config_drive_format = iso9660 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970135] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970323] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] config_source = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970498] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] console_host = devstack {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970665] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] control_exchange = nova {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970825] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cpu_allocation_ratio = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.970987] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] daemon = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.971171] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] debug = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.971329] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_access_ip_network_name = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.971492] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_availability_zone = nova {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.971646] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_ephemeral_format = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.971802] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_green_pool_size = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972048] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972215] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] default_schedule_zone = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972373] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] disk_allocation_ratio = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972530] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] enable_new_services = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972704] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] enabled_apis = ['osapi_compute'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.972864] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] enabled_ssl_apis = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973039] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] flat_injected = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973198] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] force_config_drive = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973354] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] force_raw_images = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973521] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] graceful_shutdown_timeout = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973680] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] heal_instance_info_cache_interval = -1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.973903] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] host = cpu-1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974097] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974265] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974424] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974635] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974795] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_build_timeout = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.974954] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_delete_interval = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975132] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_format = [instance: %(uuid)s] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975298] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_name_template = instance-%08x {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975455] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_usage_audit = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975619] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_usage_audit_period = month {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975780] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.975943] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976122] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] internal_service_availability_zone = internal {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976279] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] key = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976435] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] live_migration_retry_count = 30 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976601] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_color = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976761] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_config_append = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.976921] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977092] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_dir = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977251] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977374] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_options = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977530] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_rotate_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977694] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_rotate_interval_type = days {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.977887] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] log_rotation_type = none {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978026] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978155] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978321] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978483] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978608] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978765] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] long_rpc_timeout = 1800 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.978923] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_concurrent_builds = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979094] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_concurrent_live_migrations = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979253] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_concurrent_snapshots = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979407] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_local_block_devices = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979559] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_logfile_count = 30 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979712] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] max_logfile_size_mb = 200 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.979866] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] maximum_instance_delete_attempts = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980037] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metadata_listen = 0.0.0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980232] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metadata_listen_port = 8775 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980399] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metadata_workers = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980557] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] migrate_max_retries = -1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980719] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] mkisofs_cmd = genisoimage {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.980919] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981062] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] my_ip = 10.180.1.21 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981264] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981424] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] network_allocate_retries = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981595] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981759] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.981919] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] osapi_compute_listen_port = 8774 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982098] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] osapi_compute_unique_server_name_scope = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982266] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] osapi_compute_workers = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982425] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] password_length = 12 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982581] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] periodic_enable = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982735] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] periodic_fuzzy_delay = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.982901] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] pointer_model = usbtablet {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983076] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] preallocate_images = none {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983236] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] publish_errors = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983363] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] pybasedir = /opt/stack/nova {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983517] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ram_allocation_ratio = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983671] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rate_limit_burst = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983834] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rate_limit_except_level = CRITICAL {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.983991] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rate_limit_interval = 0 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984160] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reboot_timeout = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984316] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reclaim_instance_interval = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984466] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] record = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984632] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reimage_timeout_per_gb = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984794] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] report_interval = 120 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.984951] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rescue_timeout = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985120] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reserved_host_cpus = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985277] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reserved_host_disk_mb = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985431] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reserved_host_memory_mb = 512 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985586] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] reserved_huge_pages = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985739] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] resize_confirm_window = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.985894] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] resize_fs_using_block_device = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986058] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] resume_guests_state_on_host_boot = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986379] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] rpc_response_timeout = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986537] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] run_external_periodic_tasks = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986703] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] running_deleted_instance_action = reap {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.986858] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987027] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] running_deleted_instance_timeout = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987192] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler_instance_sync_interval = 120 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987357] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_down_time = 720 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987521] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] servicegroup_driver = db {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987674] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] shell_completion = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.987870] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] shelved_offload_time = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988035] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] shelved_poll_interval = 3600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988207] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] shutdown_timeout = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988365] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] source_is_ipv6 = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988518] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ssl_only = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988762] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.988959] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] sync_power_state_interval = 600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989145] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] sync_power_state_pool_size = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989314] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] syslog_log_facility = LOG_USER {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989468] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] tempdir = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989627] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] timeout_nbd = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989791] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] transport_url = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.989954] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] update_resources_interval = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990122] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] use_cow_images = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990282] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] use_journal = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990437] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] use_json = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990588] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] use_rootwrap_daemon = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990738] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] 
use_stderr = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.990889] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] use_syslog = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991051] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vcpu_pin_set = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991219] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plugging_is_fatal = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991380] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plugging_timeout = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991538] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] virt_mkfs = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991694] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] volume_usage_poll_interval = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.991847] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] watch_log_file = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.992027] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] web = /usr/share/spice-html5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 546.992206] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.992369] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.992528] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.992691] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_concurrency.disable_process_locking = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.992970] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.993167] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.993333] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.993503] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.993670] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.993836] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994034] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.auth_strategy = keystone {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994204] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.compute_link_prefix = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994374] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994544] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.dhcp_domain = novalocal {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994705] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.enable_instance_password = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.994867] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.glance_link_prefix = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995040] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995211] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995370] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.instance_list_per_project_cells = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995528] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.list_records_by_skipping_down_cells = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995686] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.local_metadata_per_cell = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.995849] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.max_limit = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996022] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.metadata_cache_expiration = 15 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996197] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.neutron_default_tenant_id = default {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996363] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.response_validation = warn {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996530] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.use_neutron_default_nets = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996700] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.996859] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997031] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997204] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997369] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_dynamic_targets = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997529] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_jsonfile_path = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997704] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.997921] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.backend = dogpile.cache.memcached {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998108] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.backend_argument = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998271] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.backend_expiration_time = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998439] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.config_prefix = cache.oslo {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998607] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.dead_timeout = 60.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998767] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.debug_cache_backend = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.998954] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.enable_retry_client = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999134] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.enable_socket_keepalive = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999306] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.enabled = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999469] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.enforce_fips_mode = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999629] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.expiration_time = 600 
{{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999788] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.hashclient_retry_attempts = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 546.999950] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000124] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_dead_retry = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000284] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_password = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000444] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000601] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000758] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_pool_maxsize = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.000916] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001091] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_sasl_enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001268] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001435] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001592] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.memcache_username = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001755] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.proxies = [] {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.001917] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_db = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002088] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_password = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002259] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002433] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002601] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_server = localhost:6379 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002765] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_socket_timeout = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.002924] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.redis_username = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003100] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.retry_attempts = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003267] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.retry_delay = 0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003430] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.socket_keepalive_count = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003590] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.socket_keepalive_idle = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003747] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.socket_keepalive_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.003902] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.tls_allowed_ciphers = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004072] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.tls_cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004231] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.tls_certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004389] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.tls_enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004543] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cache.tls_keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004711] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.004882] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.auth_type = password {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005057] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005235] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005396] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005558] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005718] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.cross_az_attach = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.005909] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.debug = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006101] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.endpoint_template = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006274] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.http_retries = 3 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006436] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006592] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006759] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.os_region_name = RegionOne {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.006919] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007091] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cinder.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007265] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007423] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.cpu_dedicated_set = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007580] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.cpu_shared_set = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007742] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.image_type_exclude_list = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.007927] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008110] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008276] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008437] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008605] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008770] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.resource_provider_association_refresh = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.008955] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.009140] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.shutdown_retry_interval = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.009325] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.009503] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] conductor.workers = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.009680] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] console.allowed_origins = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.009840] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] console.ssl_ciphers = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010018] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] console.ssl_minimum_version = default {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010191] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] consoleauth.enforce_session_timeout = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010357] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] consoleauth.token_ttl = 600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010527] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010681] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.certfile = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.010842] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011008] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011174] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011331] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011489] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011644] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011801] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.011958] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012128] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012286] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012441] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012608] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.service_type = accelerator {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012768] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.012928] env[68279]: DEBUG oslo_service.backend.eventlet.service 
[None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013098] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013257] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013435] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013596] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] cyborg.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013762] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.asyncio_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.013921] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.asyncio_slave_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014106] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.backend = sqlalchemy {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014277] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014443] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.connection_debug = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014609] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.connection_parameters = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014772] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.connection_recycle_time = 3600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.014932] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.connection_trace = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015107] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.db_inc_retry_interval = 
True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015271] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.db_max_retries = 20 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015432] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.db_max_retry_interval = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015593] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.db_retry_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015753] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.max_overflow = 50 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.015937] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.max_pool_size = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016124] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.max_retries = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016296] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016455] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.mysql_wsrep_sync_wait = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016609] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.pool_timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016768] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.retry_interval = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.016923] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.slave_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.017095] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.sqlite_synchronous = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.017260] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] database.use_db_reconnect = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
547.017425] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.asyncio_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.017580] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.asyncio_slave_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.017747] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.backend = sqlalchemy {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.017966] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.018159] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.connection_debug = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.018336] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.connection_parameters = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.018499] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.connection_recycle_time = 3600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.018661] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.connection_trace = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.018844] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.db_inc_retry_interval = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019033] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.db_max_retries = 20 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019207] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.db_max_retry_interval = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019372] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.db_retry_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019534] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.max_overflow = 50 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019695] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.max_pool_size = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.019856] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.max_retries = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020046] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020212] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020370] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.pool_timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020530] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.retry_interval = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020686] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.slave_connection = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.020853] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] api_database.sqlite_synchronous = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021053] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] devices.enabled_mdev_types = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021229] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021400] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021560] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ephemeral_storage_encryption.enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021719] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.021885] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.api_servers = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022059] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022221] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022378] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022533] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022690] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.022848] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.debug = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023013] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.default_trusted_certificate_ids = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023184] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.enable_certificate_validation = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023343] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.enable_rbd_download = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023497] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023658] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023816] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.023976] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.max_version = None {{(pid=68279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024142] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024301] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.num_retries = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024466] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.rbd_ceph_conf = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024624] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.rbd_connect_timeout = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024786] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.rbd_pool = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.024949] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.rbd_user = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025126] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025283] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025437] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025600] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.service_type = image {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025757] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.025934] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026112] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026270] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026446] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026606] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.verify_glance_signatures = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026760] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] glance.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.026921] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] guestfs.debug = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027102] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027266] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.auth_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027423] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027578] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027770] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.027928] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028116] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028278] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028440] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.insecure = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028595] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028751] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.028934] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029114] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029276] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029434] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029602] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.service_type = shared-file-system {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029763] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.share_apply_policy_timeout = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.029938] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030147] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030324] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030486] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030666] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030825] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] manila.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.030992] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] mks.enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.031347] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.031536] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.manager_interval = 2400 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.031704] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.precache_concurrency = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.031874] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.remove_unused_base_images = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032054] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032226] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032398] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] image_cache.subdirectory_name = _base {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032571] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.api_max_retries = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032733] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.api_retry_interval = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.032891] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033065] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.auth_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033225] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033378] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033539] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033699] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.conductor_group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.033857] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034026] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034187] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034349] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034504] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034661] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034817] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.034980] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.peer_list = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035149] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035305] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.retriable_status_codes = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035464] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.serial_console_state_timeout = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035619] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035786] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.service_type = baremetal {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.035968] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.shard = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036152] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036309] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036467] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036626] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036805] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.036965] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ironic.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.037158] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.037329] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] key_manager.fixed_key = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.037507] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.037666] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.barbican_api_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.037841] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.barbican_endpoint = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038039] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.barbican_endpoint_type = public {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038206] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.barbican_region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038364] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038520] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038678] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.038854] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039036] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039203] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.number_of_retries = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039361] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.retry_delay = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039519] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.send_service_user_token = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039678] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039833] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.039992] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.verify_ssl = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040181] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican.verify_ssl_path = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040357] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040520] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.auth_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040676] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040830] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.040990] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041162] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041317] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041475] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041629] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] barbican_service_user.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041792] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.approle_role_id = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.041962] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.approle_secret_id = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042173] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.kv_mountpoint = secret {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042339] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.kv_path = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042504] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.kv_version = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042660] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.namespace = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042817] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.root_token_id = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.042974] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.ssl_ca_crt_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043159] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.timeout = 60.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043320] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.use_ssl = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043486] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043649] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043805] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.043965] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044135] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.connect_retries = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044293] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044449] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044607] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044761] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.044917] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045085] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045245] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045403] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045559] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045724] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.service_type = identity {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.045902] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046082] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046245] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046402] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046577] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046733] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] keystone.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.046918] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.ceph_mount_options = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.047317] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.047503] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.connection_uri = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.047669] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_mode = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.047858] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048050] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_models = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048226] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_power_governor_high = performance {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048392] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048553] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_power_management = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048720] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.048905] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.device_detach_attempts = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049093] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.device_detach_timeout = 20 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049262] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.disk_cachemodes = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049420] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.disk_prefix = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049581] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.enabled_perf_events = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049740] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.file_backed_memory = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.049900] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.gid_maps = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050066] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.hw_disk_discard = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050224] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.hw_machine_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050388] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_rbd_ceph_conf = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050548] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050707] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.050870] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_rbd_glance_store_name = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051048] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051217] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_type = default {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051373] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.images_volume_group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051528] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.inject_key = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051684] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.inject_partition = -2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051839] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.inject_password = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.051997] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.iscsi_iface = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052169] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.iser_use_multipath = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052329] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052487] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052643] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_downtime = 500 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052800] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.052956] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053124] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_inbound_addr = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053281] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053437] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053591] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_scheme = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053757] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_timeout_action = abort {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.053914] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_tunnelled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.054118] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_uri = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.054305] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.live_migration_with_native_tls = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.054466] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.max_queues = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.054626] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.054849] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055023] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.nfs_mount_options = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055317] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055491] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055658] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055820] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.055981] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.056159] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_pcie_ports = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.056324] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.056487] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.pmem_namespaces = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.056645] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.quobyte_client_cfg = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.056924] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.057136] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.057343] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.057515] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.057677] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rbd_secret_uuid = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.057858] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rbd_user = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058045] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058384] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rescue_image_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058542] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rescue_kernel_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058697] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rescue_ramdisk_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.058882] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.059064] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.rx_queue_size = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.059236] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.smbfs_mount_options = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.059521] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.059698] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.snapshot_compression = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.059862] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.snapshot_image_format = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060104] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060277] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.sparse_logical_volumes = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060441] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.swtpm_enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060611] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.swtpm_group = tss {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060779] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.swtpm_user = tss {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.060999] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.sysinfo_serial = unique {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.061207] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.tb_cache_size = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.061373] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.tx_queue_size = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.061540] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.uid_maps = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.061702] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.use_virtio_for_bridges = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.061873] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.virt_type = kvm {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062056] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.volume_clear = zero {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.volume_clear_size = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062384] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.volume_enforce_multipath = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062548] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.volume_use_multipath = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062708] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_cache_path = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.062877] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.063056] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.063223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.063390] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.063747] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.063939] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.vzstorage_mount_user = stack {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064126] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064305] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064482] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.auth_type = password {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064646] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064806] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.064974] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.065149] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.065311] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.065481] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.default_floating_pool = public {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.065639] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.065808] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.extension_sync_interval = 600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066007] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.http_retries = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066186] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066346] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066504] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066674] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.066834] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067010] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.ovs_bridge = br-int {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067184] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.physnets = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067353] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.region_name = RegionOne 
{{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067512] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067680] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.service_metadata_proxy = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.067863] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068057] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.service_type = network {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068225] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068384] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068541] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068697] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.068894] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069076] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] neutron.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069253] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.bdms_in_notifications = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069429] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.default_level = INFO {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069590] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.include_share_mapping = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069763] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.notification_format = unversioned {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.069925] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.notify_on_state_change = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070111] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070287] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] pci.alias = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070454] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] pci.device_spec = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070617] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] pci.report_in_placement = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070787] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.070960] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.auth_type = password {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071141] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071302] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071461] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071621] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071779] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.connect_retries = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.071933] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072104] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.default_domain_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072261] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.default_domain_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072417] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.domain_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072571] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.domain_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072728] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.endpoint_override = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.072886] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073073] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073239] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073392] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073560] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.password = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073717] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.project_domain_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.073881] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.project_domain_name = Default {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074093] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.project_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074275] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.project_name = service {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074444] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.region_name = RegionOne {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074604] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074765] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.074933] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.service_type = placement {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075108] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075268] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.status_code_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075426] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075581] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.system_scope = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075736] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.075890] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.trust_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076059] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.user_domain_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076229] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] 
placement.user_domain_name = Default {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076387] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.user_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076556] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.username = nova {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076734] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.076893] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] placement.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077087] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.cores = 20 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077252] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.count_usage_from_placement = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077419] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077584] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.injected_file_content_bytes = 10240 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077747] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.injected_file_path_length = 255 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.077938] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.injected_files = 5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078122] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.instances = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078287] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.key_pairs = 100 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078454] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.metadata_items = 128 {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078616] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.ram = 51200 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078776] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.recheck_quota = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.078940] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.server_group_members = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.079117] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.server_groups = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.079332] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.079506] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] quota.unified_limits_resource_strategy = require {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.079678] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.079839] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080013] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.image_metadata_prefilter = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080178] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080338] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.max_attempts = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080496] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.max_placement_results = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080655] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080812] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.080969] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.081153] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] scheduler.workers = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.081330] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.081498] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.081675] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.081843] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082014] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082189] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082352] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082534] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082697] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.082858] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083032] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083197] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083356] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083527] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083688] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.isolated_hosts = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.083847] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.isolated_images = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084023] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084181] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084340] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084496] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.pci_in_placement = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084655] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084814] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.084973] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085145] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085305] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085463] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085620] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.track_instance_changes = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085798] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.085991] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metrics.required = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.086172] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metrics.weight_multiplier = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.086334] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.086493] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] metrics.weight_setting = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.086804] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.086977] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.enabled = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.087164] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.port_range = 10000:20000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.087332] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.087497] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.087662] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] serial_console.serialproxy_port = 6083 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.087858] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088038] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.auth_type = password {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088204] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088362] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088521] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088679] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.088835] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089011] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.send_service_user_token = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089178] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.split_loggers = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089334] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] service_user.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089502] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.agent_enabled = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089662] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.089960] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090181] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090350] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.html5proxy_port = 6082 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090510] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.image_compression = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090666] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.jpeg_compression = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090818] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.playback_compression = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.090978] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.require_secure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.091158] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.server_listen = 127.0.0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.091325] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.091598] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.091766] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.streaming_mode = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.091928] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] spice.zlib_compression = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092106] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] upgrade_levels.baseapi = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092277] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] upgrade_levels.compute = auto {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092433] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] upgrade_levels.conductor = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092588] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] upgrade_levels.scheduler = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092750] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.092910] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093081] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093242] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093401] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093558] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093713] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.093870] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094037] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vendordata_dynamic_auth.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094212] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.api_retry_count = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094373] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.ca_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094542] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094707] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.cluster_name = testcl1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.094869] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.connection_pool_size = 10 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095039] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.console_delay_seconds = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095210] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.datastore_regex = ^datastore.* {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095413] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095587] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.host_password = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095755] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.host_port = 443 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.095944] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.host_username = administrator@vsphere.local {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096142] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.insecure = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096308] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.integration_bridge = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096472] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.maximum_objects = 100 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096630] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.pbm_default_policy = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096790] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.pbm_enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.096947] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.pbm_wsdl_location = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097130] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097291] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.serial_port_proxy_uri = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097447] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.serial_port_service_uri = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097613] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.task_poll_interval = 0.5 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097782] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.use_linked_clone = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.097988] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.vnc_keymap = en-us {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.098175] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.vnc_port = 5900 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.098340] env[68279]: DEBUG 
oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vmware.vnc_port_total = 10000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.098523] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.auth_schemes = ['none'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.098697] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.098977] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.099175] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.099347] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.novncproxy_port = 6080 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.099533] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.server_listen = 127.0.0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.099710] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.099871] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.vencrypt_ca_certs = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100040] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.vencrypt_client_cert = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100201] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vnc.vencrypt_client_key = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100375] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100536] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_deep_image_inspection = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100694] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.100851] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101021] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101180] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.disable_rootwrap = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101339] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.enable_numa_live_migration = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101495] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101653] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101810] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.101968] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.libvirt_disable_apic = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.102132] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.102294] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.102595] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.102766] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.102932] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103110] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103273] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103432] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103589] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103750] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.103935] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104160] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.client_socket_timeout = 900 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104338] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.default_pool_size = 1000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104505] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.keep_alive = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104672] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.max_header_line = 16384 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104835] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.104999] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.ssl_ca_file = None 
{{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.105172] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.ssl_cert_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.105334] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.ssl_key_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.105499] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.tcp_keepidle = 600 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.105672] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.105839] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] zvm.ca_file = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.106012] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] zvm.cloud_connector_url = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107077] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107077] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] zvm.reachable_timeout = 300 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107077] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107077] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107077] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.connection_string = messaging:// {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107250] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.enabled = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107358] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] 
profiler.es_doc_type = notification {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107512] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.es_scroll_size = 10000 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107680] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.es_scroll_time = 2m {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.107862] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.filter_error_trace = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108049] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.hmac_keys = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108220] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.sentinel_service_name = mymaster {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108387] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.socket_timeout = 0.1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108546] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.trace_requests = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108704] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler.trace_sqlalchemy = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.108882] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler_jaeger.process_tags = {} {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109053] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler_jaeger.service_name_prefix = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109218] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] profiler_otlp.service_name_prefix = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109378] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] remote_debug.host = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109537] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] remote_debug.port = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109707] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.109882] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110078] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110248] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110408] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110567] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110723] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.110882] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111054] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111381] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111550] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111717] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.111881] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112060] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112234] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112396] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112556] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112726] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.112888] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113062] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113231] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113393] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113552] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113711] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68279) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.113869] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114046] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114212] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114370] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114529] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114689] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.114855] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115032] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115197] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115362] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115528] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115688] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.115887] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116075] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_notifications.retry = -1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116258] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116481] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116665] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.auth_section = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116834] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.auth_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.116997] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.cafile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.117171] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.certfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.117336] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.collect_timing = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.117493] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.connect_retries = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.117653] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.connect_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.117826] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_id = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118026] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118193] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_override = 
None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118352] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118510] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118665] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.endpoint_service_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.118827] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.insecure = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119011] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.keyfile = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119181] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.max_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119338] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.min_version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119492] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.region_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119648] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.retriable_status_codes = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119804] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.service_name = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.119961] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.service_type = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120149] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.split_loggers = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120327] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.status_code_retries = None {{(pid=68279) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120488] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.status_code_retry_delay = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120647] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.timeout = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120803] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.valid_interfaces = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.120967] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_limit.version = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121156] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_reports.file_event_handler = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121321] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121478] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] oslo_reports.log_dir = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121646] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121802] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.121985] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122178] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122345] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122505] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122670] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122827] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.122984] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123165] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123325] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123480] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] vif_plug_ovs_privileged.user = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123646] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123820] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.123991] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.124176] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.124344] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.124513] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.124675] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.124832] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125014] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125189] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.isolate_vif = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125353] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125515] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125680] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.125876] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126088] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] os_vif_ovs.per_port_bridge = False {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126267] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.capabilities = [21] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126427] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126581] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.helper_command = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126742] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.126905] env[68279]: DEBUG oslo_service.backend.eventlet.service [None 
req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127073] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] privsep_osbrick.user = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127244] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127399] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.group = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127551] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.helper_command = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127710] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.127898] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.128087] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] nova_sys_admin.user = None {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.128223] env[68279]: DEBUG oslo_service.backend.eventlet.service [None req-c1dec3cf-6484-433e-b803-5339a1ab20f1 None None] ******************************************************************************** {{(pid=68279) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 547.128631] env[68279]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 547.632431] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Getting list of instances from cluster (obj){ [ 547.632431] env[68279]: value = "domain-c8" [ 547.632431] env[68279]: _type = "ClusterComputeResource" [ 547.632431] env[68279]: } {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 547.633526] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd1beb6-a7e0-48c6-8710-06b8b691168b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.643544] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Got total of 0 instances {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 547.644117] env[68279]: WARNING nova.virt.vmwareapi.driver [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 547.644601] env[68279]: INFO nova.virt.node [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Generated node identity 40ba16cf-8244-4715-b8c1-975029462ee4 [ 547.644838] env[68279]: INFO nova.virt.node [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Wrote node identity 40ba16cf-8244-4715-b8c1-975029462ee4 to /opt/stack/data/n-cpu-1/compute_id [ 548.147937] env[68279]: WARNING nova.compute.manager [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Compute nodes ['40ba16cf-8244-4715-b8c1-975029462ee4'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 549.153078] env[68279]: INFO nova.compute.manager [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 550.158741] env[68279]: WARNING nova.compute.manager [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 550.159043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 550.159275] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 550.159430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 550.159582] env[68279]: DEBUG nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 550.160550] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bfd5af-ae3a-4d7e-b0e1-7e49dd0ce8db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.169312] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a7787f-477e-4a6f-baf7-42c7dd4aa950 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.183914] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc1e4e8-c0f4-480d-86bd-68d586dd57a7 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.189689] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e389138-ffa3-4429-b6c6-c8bbb9e008b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.217626] env[68279]: DEBUG nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181111MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 550.217759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 550.217972] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 550.720868] env[68279]: WARNING nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] No compute node record for cpu-1:40ba16cf-8244-4715-b8c1-975029462ee4: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 40ba16cf-8244-4715-b8c1-975029462ee4 could not be found. [ 551.224343] env[68279]: INFO nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 40ba16cf-8244-4715-b8c1-975029462ee4 [ 552.732210] env[68279]: DEBUG nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 552.732616] env[68279]: DEBUG nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 552.916472] env[68279]: INFO nova.scheduler.client.report [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] [req-25348578-e5bd-4d68-b435-f2a54087bcfb] Created resource provider record via placement API for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
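The inventory that the resource tracker reports to Placement for this provider (see the update_inventory / set_inventory_for_provider entries below) is keyed by resource class, and the effective schedulable capacity normally follows Placement's rule capacity = (total - reserved) * allocation_ratio. The following is a minimal illustrative sketch, not part of the captured log or of the Nova source: the dict values are copied from the inventory entries logged below (min_unit, max_unit and step_size omitted for brevity), and only that formula is assumed.

# Sketch: derive schedulable capacity from the inventory payload logged below.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    # Placement treats usable capacity as (total - reserved) * allocation_ratio.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With the 4.0 VCPU allocation ratio shown here, the 48 physical vCPUs advertise as 192 schedulable VCPU units, which is why the later claim entries succeed even though each instance consumes whole vCPUs.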
[ 552.939735] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81df0e2-8448-49c1-beab-01202a8b62e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.947999] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7a835e-affb-464e-80d5-148939ec11a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.979130] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65bb11e-40f4-4c8f-a1be-b4f8062c812c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.986887] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0ab74b-a3f4-4cc0-9144-76600add99d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.000540] env[68279]: DEBUG nova.compute.provider_tree [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.534872] env[68279]: DEBUG nova.scheduler.client.report [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 553.535127] env[68279]: DEBUG nova.compute.provider_tree [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 0 to 1 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 553.535303] env[68279]: DEBUG nova.compute.provider_tree [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.594115] env[68279]: DEBUG nova.compute.provider_tree [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Updating 
resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 1 to 2 during operation: update_traits {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 554.099324] env[68279]: DEBUG nova.compute.resource_tracker [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 554.099751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.882s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 554.099751] env[68279]: DEBUG nova.service [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Creating RPC server for service compute {{(pid=68279) start /opt/stack/nova/nova/service.py:186}} [ 554.113963] env[68279]: DEBUG nova.service [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] Join ServiceGroup membership for this service compute {{(pid=68279) start /opt/stack/nova/nova/service.py:203}} [ 554.114116] env[68279]: DEBUG nova.servicegroup.drivers.db [None req-3794da8c-ca93-42f3-9291-e413cb2b39c8 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68279) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 575.118783] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.622980] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Getting list of instances from cluster (obj){ [ 575.622980] env[68279]: value = "domain-c8" [ 575.622980] env[68279]: _type = "ClusterComputeResource" [ 575.622980] env[68279]: } {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 575.624144] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3491a92a-ce98-4483-8616-a9d0988298de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.633127] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Got total of 0 instances {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 575.633365] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.633659] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Getting list of instances from cluster (obj){ [ 575.633659] env[68279]: value = "domain-c8" [ 575.633659] env[68279]: _type = "ClusterComputeResource" [ 575.633659] env[68279]: } {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 575.634488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd173aa9-d051-4ce6-af48-76892d56610d 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.641828] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Got total of 0 instances {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 592.497837] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.498131] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.512191] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "3d3b5611-714f-4757-b848-891319c2fea3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.512471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.002713] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 593.015396] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 593.550172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.550734] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.553069] env[68279]: INFO nova.compute.claims [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.556474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.634870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5810ec0-55d2-4858-876e-91934a96789e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.647034] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93be6c6-8f18-41e8-b9ea-8a5672aba67b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.684049] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1535d493-d372-4f80-bd06-2d898b829d62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.693060] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d83640-5181-4e52-99ba-bd7e77502d59 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.711254] env[68279]: DEBUG nova.compute.provider_tree [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.195677] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.195952] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.214868] env[68279]: DEBUG nova.scheduler.client.report [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.699281] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 595.724044] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.724643] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.730572] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.173s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.731259] env[68279]: INFO nova.compute.claims [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.233044] env[68279]: DEBUG nova.compute.utils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.237228] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.237228] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.244390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.569054] env[68279]: DEBUG nova.policy [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d12c43445684388939cc7ff0910c462', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aef4bec1d68418390ba922a536e5712', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.740034] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.828335] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ad6d19-4a62-43ee-902d-ff12e5cd6e63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.840702] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8145d2-b94b-449f-99b6-b8a77edccecb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.876761] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a68eb3b-b188-4a2c-a3a5-7f6aad7175f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.884581] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e573375-234c-4a89-a1d3-4016efdef103 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.898996] env[68279]: DEBUG nova.compute.provider_tree [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.403949] env[68279]: DEBUG nova.scheduler.client.report [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.457131] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Successfully created port: 3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.747784] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.785727] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.785727] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.785727] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.786300] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.786300] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.786300] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.786300] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.786300] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 597.786439] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.786439] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.786439] env[68279]: DEBUG nova.virt.hardware [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.787987] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3755ccde-5dda-4185-8830-e0933525fea3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.798044] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff705bc1-5fca-4f69-a8a1-bef9bcd5c9ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.818868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ec1269-f4fe-4324-9410-7a73c730b04a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.911014] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.181s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.913017] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 597.918223] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.670s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.919356] env[68279]: INFO nova.compute.claims [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.428188] env[68279]: DEBUG nova.compute.utils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.432469] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.432469] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.644544] env[68279]: DEBUG nova.policy [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23bb0f4f036f49028ea36cc57370e52e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c61e9b1eab849a3aca9bf7fbfb21886', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.936453] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.052183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "50e08259-7915-49bb-b137-5cc6e9d53c16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.052351] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.056236] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa4191b-2469-4d2d-a2d8-bb38415a9f84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.064177] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f534f7e3-6260-45e2-8ca8-80e8ffb8292c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.099925] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3a4c39-cad4-486f-9ba9-f336e32d1ab2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.108178] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65677b70-61ff-4300-ab97-6ae3a9ebf9fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.122580] env[68279]: DEBUG nova.compute.provider_tree [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.560656] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 599.625831] env[68279]: DEBUG nova.scheduler.client.report [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.951302] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 599.983473] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 599.984659] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.985232] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 599.985514] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.985824] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 599.986049] env[68279]: DEBUG 
nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 599.986310] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 599.986513] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 599.986726] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 599.986922] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 599.987224] env[68279]: DEBUG nova.virt.hardware [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 599.989536] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c38d7f-2a32-4441-918f-7d796ad1a630 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.999090] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7e9ce8-fbc9-4a40-8b89-406deaa80075 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.091236] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.135563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.137157] env[68279]: DEBUG nova.compute.manager [None 
req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 600.140895] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.050s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.145914] env[68279]: INFO nova.compute.claims [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.390763] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Successfully created port: 485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.513142] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Successfully updated port: 3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.648633] env[68279]: DEBUG nova.compute.utils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 600.654549] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 600.654724] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 600.920735] env[68279]: DEBUG nova.policy [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6755ca1c30aa43ef9e956e58f09ec6d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4242bed09e2465887b6fe1ebf06292b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.019664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.019664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.019664] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.159096] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 601.255572] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4503509f-e7eb-4c5e-ac1c-ddb233abe7e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.266073] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2388ef16-e498-4cc3-ad50-3957c02bcaa2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.302960] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd25700-afdb-48e0-ac81-33279e7790ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.310959] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ffc18e-0048-49a6-92c4-cc078ce01e1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.324678] env[68279]: DEBUG nova.compute.provider_tree [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.671130] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.827825] env[68279]: DEBUG nova.scheduler.client.report [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 601.919559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.919791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.175733] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 602.178189] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.178465] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.178657] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.178844] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.179080] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.179304] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.179453] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.179617] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 602.179762] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.195263] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Successfully created port: 6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.219262] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.219262] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.224146] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 602.224146] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.224146] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.224146] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
602.224146] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 602.224298] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.224298] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.224298] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.224298] env[68279]: DEBUG nova.virt.hardware [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.224613] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2057c7-4a2a-48d3-9541-de9779293161 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.239381] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b19a94f-1c24-4bd8-beb8-c49ca8190822 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.242414] env[68279]: DEBUG nova.network.neutron [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Updating instance_info_cache with network_info: [{"id": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "address": "fa:16:3e:09:e9:92", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap3824fbbc-b1", "ovs_interfaceid": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.343465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.343465] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.423755] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.438776] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.439135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.686693] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.686693] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.686877] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.690538] env[68279]: DEBUG nova.compute.resource_tracker [None 
req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 602.691553] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865b9e95-6b05-4a83-b31c-d260c3513e82 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.706407] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e149e6c1-c5ff-43c8-bd94-b01f8efc8dd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.721336] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81aff9b-5ebb-4290-b1ae-9aa4045e052c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.729284] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9410bda-4aa3-4909-a145-4e3f31c61f38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.762098] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.762400] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Instance network_info: |[{"id": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "address": "fa:16:3e:09:e9:92", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3824fbbc-b1", "ovs_interfaceid": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 602.762969] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181102MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 602.763166] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.763382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.765816] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:e9:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3824fbbc-b1fe-488c-a0d7-75f277ff669a', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.780832] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.781282] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57e916ff-2d99-45ca-8602-a3d249cec948 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.797705] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created folder: OpenStack in parent group-v4. [ 602.797705] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating folder: Project (4aef4bec1d68418390ba922a536e5712). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.797705] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad34291b-b82f-4656-89f6-73ca4f6a5642 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.807470] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created folder: Project (4aef4bec1d68418390ba922a536e5712) in parent group-v594445. [ 602.807631] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating folder: Instances. Parent ref: group-v594446. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.808645] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0188aca1-25fa-4a01-8c73-b446fa66eef9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.818823] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created folder: Instances in parent group-v594446. [ 602.819122] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.819557] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.820084] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8222befc-84f7-4d73-949a-eb3c1f2b2f2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.840780] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.840780] env[68279]: value = "task-2962395" [ 602.840780] env[68279]: _type = "Task" [ 602.840780] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.852508] env[68279]: DEBUG nova.compute.utils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 602.853647] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962395, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.857602] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 602.857768] env[68279]: DEBUG nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 602.933717] env[68279]: DEBUG nova.compute.manager [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Received event network-vif-plugged-3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 602.934042] env[68279]: DEBUG oslo_concurrency.lockutils [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] Acquiring lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.934294] env[68279]: DEBUG oslo_concurrency.lockutils [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.934508] env[68279]: DEBUG oslo_concurrency.lockutils [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.934637] env[68279]: DEBUG nova.compute.manager [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] No waiting events found dispatching network-vif-plugged-3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 602.934799] env[68279]: WARNING nova.compute.manager [req-4044bc70-3d53-4fa5-8b6f-c1ee8496145e req-d4fcc5bc-366a-40c7-a173-407a758fe5fb service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Received unexpected event network-vif-plugged-3824fbbc-b1fe-488c-a0d7-75f277ff669a for instance with vm_state building and task_state spawning. [ 602.942518] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.963332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.040158] env[68279]: DEBUG nova.policy [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5cd7e44689a40d993e5da3165332fd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '295e4a23df6e4d029636d514484434e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.353704] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962395, 'name': CreateVM_Task, 'duration_secs': 0.367197} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.353997] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.360289] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.397518] env[68279]: DEBUG oslo_vmware.service [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74665f84-55ef-479c-a644-1f2e35cab55a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.404979] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.404979] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.405321] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 603.405900] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baccbdc9-47ee-4685-8b19-8709f6492dd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.411966] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 603.411966] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a478a-c1c4-5481-5555-23e2cdb13547" [ 603.411966] env[68279]: _type = "Task" [ 603.411966] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.426310] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a478a-c1c4-5481-5555-23e2cdb13547, 'name': SearchDatastore_Task} progress is 0%. 
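The "Waiting for the task ... progress is 0%" pairs above come from a poll loop: the driver submits a vCenter task, then repeatedly reads its state until it reports success or error. A simplified stand-in for that loop; read_task_state and the state names are assumptions for illustration, not the oslo.vmware API:

```python
import time

def read_task_state(task_ref):
    """Placeholder: a real driver would query the task via the vSphere API."""
    raise NotImplementedError

def wait_for_task(task_ref, poll_interval=0.5, timeout=300):
    """Poll a task reference until it finishes, mirroring the progress lines above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = read_task_state(task_ref)
        print(f"Task {task_ref} progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {error}")
        time.sleep(poll_interval)              # task still queued or running
    raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")
```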
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.463951] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.542110] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Successfully updated port: 485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.815614] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 3d3b5611-714f-4757-b848-891319c2fea3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.816321] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.816639] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e65722bb-e39a-47e5-9aaf-87cfd27930d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.816867] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 50e08259-7915-49bb-b137-5cc6e9d53c16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.902486] env[68279]: DEBUG nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Successfully created port: ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.927034] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.927192] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.927346] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.927947] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.927947] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.928261] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7592cbe4-9106-41b6-8bf3-559cc1e90599 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.937398] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.937618] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 603.938496] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783164ae-1780-426c-ae60-84fdcf0eb7c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.949612] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-922c3acb-843a-4855-b6fe-07965bd50f9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.959026] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 603.959026] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525f2a8a-4ded-d57d-c3e0-cf63dd9ffbd0" [ 603.959026] env[68279]: _type = "Task" [ 603.959026] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.971052] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525f2a8a-4ded-d57d-c3e0-cf63dd9ffbd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.045482] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.045748] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquired lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.046391] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.323383] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 604.368797] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Start spawning the instance on the hypervisor. 
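The image-cache lines above (lock on devstack-image-cache_base/<image-id>, MakeDirectory, SearchDatastore_Task) implement a check-then-populate cache: under a named lock, the driver looks for the cached VMDK and only downloads and copies it when it is missing. A rough sketch of that flow using oslo.concurrency's named locks; datastore_exists, fetch_image_to and copy_disk are hypothetical helpers standing in for the vSphere calls seen in the log:

```python
from oslo_concurrency import lockutils

def datastore_exists(path):
    """Placeholder for a SearchDatastore_Task lookup."""
    raise NotImplementedError

def fetch_image_to(image_id, datastore_path):
    """Placeholder for streaming the Glance image onto the datastore."""
    raise NotImplementedError

def copy_disk(src, dst):
    """Placeholder for a CopyVirtualDisk_Task call."""
    raise NotImplementedError

def ensure_cached_image(image_id, cache_dir="[datastore1] devstack-image-cache_base"):
    """Populate the per-datastore image cache at most once per image."""
    cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    # Serialize concurrent builds of the same image, like the
    # "[datastore1] devstack-image-cache_base/<image-id>" lock in the log.
    with lockutils.lock(f"image-cache-{image_id}"):
        if not datastore_exists(cached_vmdk):
            tmp = f"[datastore1] vmware_temp/{image_id}/tmp-sparse.vmdk"
            fetch_image_to(image_id, tmp)   # download to a temp path first
            copy_disk(tmp, cached_vmdk)     # then copy into the shared cache
        return cached_vmdk
```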
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.396704] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.396941] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.397114] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.397295] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.397440] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.397585] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.398140] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.398472] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.398577] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 
tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.399118] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.399975] env[68279]: DEBUG nova.virt.hardware [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.401230] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c58678c-2c5d-4c7b-a5f2-5e4e2ebf2381 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.411091] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1267a1d7-86f8-4d1b-8d83-165c1065cb84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.468578] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 604.468578] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating directory with path [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.468844] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3af9f215-67f3-4941-ab5f-317bd572faa9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.492811] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created directory with path [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.493051] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Fetch image to [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 604.493577] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 
tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Downloading image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk on the data store datastore1 {{(pid=68279) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 604.495501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2675fa57-51cf-4e11-bc86-05ac251f07a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.508924] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a846ab-c995-40f1-b9df-7599debb299d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.520288] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4c24c9-e2fe-4fe4-8bbe-40889f1573a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.562552] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d4542a-e008-4248-b903-07c298474692 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.572889] env[68279]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d34e596d-811f-41c2-8cb3-ed1396bf3536 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.648881] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.668182] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Downloading image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to the data store datastore1 {{(pid=68279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 604.829948] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
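The nova.virt.hardware lines above walk from flavor and image limits (all 0, i.e. unset, capped at 65536) to a single candidate topology of 1 socket x 1 core x 1 thread for the 1-vCPU m1.nano flavor. A small sketch of that enumeration under the same defaults; it is a simplification of the real constraint logic, not the actual Nova code:

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate socket/core/thread splits whose product equals the vCPU count."""
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# The 1-vCPU m1.nano flavor in the log has a single candidate: 1 socket x 1 core x 1 thread.
print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```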
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 604.830239] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 604.832023] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 604.881838] env[68279]: DEBUG oslo_vmware.rw_handles [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 605.031275] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d3696a-4014-4d8d-af5a-24540cb116f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.046655] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5d8a09-8fbc-444a-b9ca-4ef56a25c911 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.053818] env[68279]: DEBUG nova.network.neutron [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Updating instance_info_cache with network_info: [{"id": "485b302b-3131-449a-ae46-edcfc4e6a588", "address": "fa:16:3e:d5:ed:0f", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap485b302b-31", "ovs_interfaceid": "485b302b-3131-449a-ae46-edcfc4e6a588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.059911] env[68279]: DEBUG nova.compute.manager [req-c573215d-bc8d-43c5-9e62-037e49029de9 
req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Received event network-vif-plugged-485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 605.060168] env[68279]: DEBUG oslo_concurrency.lockutils [req-c573215d-bc8d-43c5-9e62-037e49029de9 req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] Acquiring lock "3d3b5611-714f-4757-b848-891319c2fea3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.060345] env[68279]: DEBUG oslo_concurrency.lockutils [req-c573215d-bc8d-43c5-9e62-037e49029de9 req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] Lock "3d3b5611-714f-4757-b848-891319c2fea3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.060503] env[68279]: DEBUG oslo_concurrency.lockutils [req-c573215d-bc8d-43c5-9e62-037e49029de9 req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] Lock "3d3b5611-714f-4757-b848-891319c2fea3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.062601] env[68279]: DEBUG nova.compute.manager [req-c573215d-bc8d-43c5-9e62-037e49029de9 req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] No waiting events found dispatching network-vif-plugged-485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 605.062601] env[68279]: WARNING nova.compute.manager [req-c573215d-bc8d-43c5-9e62-037e49029de9 req-f2cb2965-5fdc-4096-8651-35ce40d3fb36 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Received unexpected event network-vif-plugged-485b302b-3131-449a-ae46-edcfc4e6a588 for instance with vm_state building and task_state spawning. 
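The "Final resource view" in the resource-tracker lines above is simple bookkeeping over the tracked instances: four m1.nano guests (1 vCPU, 192 MB RAM, 1 GB root disk each) plus the 512 MB reserved for the host give used_ram=1280MB, used_disk=4GB and used_vcpus=4 against 48 physical vCPUs. The same arithmetic as a tiny sketch, with the flavor numbers taken from the log:

```python
# Flavor m1.nano, as logged: 1 vCPU, 192 MB RAM, 1 GB root disk.
instances = 4
reserved_ram_mb = 512           # MEMORY_MB 'reserved' from the inventory report

used_vcpus = instances * 1
used_ram_mb = reserved_ram_mb + instances * 192
used_disk_gb = instances * 1

print(used_vcpus, used_ram_mb, used_disk_gb)   # 4 1280 4 -> matches the final resource view
```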
[ 605.103547] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30e589d-05ae-41d4-a04e-766970afaedf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.116500] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479949c7-1eb9-4a86-b5b4-daeafc8c93a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.141907] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.192512] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Successfully updated port: 6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.548518] env[68279]: DEBUG oslo_vmware.rw_handles [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 605.549107] env[68279]: DEBUG oslo_vmware.rw_handles [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 605.557153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Releasing lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.557355] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Instance network_info: |[{"id": "485b302b-3131-449a-ae46-edcfc4e6a588", "address": "fa:16:3e:d5:ed:0f", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap485b302b-31", "ovs_interfaceid": "485b302b-3131-449a-ae46-edcfc4e6a588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 605.558624] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:ed:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '485b302b-3131-449a-ae46-edcfc4e6a588', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.570688] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Creating folder: Project (2c61e9b1eab849a3aca9bf7fbfb21886). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.571128] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acaa41b0-0b2c-4643-bb50-47e8806e211b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.583056] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Created folder: Project (2c61e9b1eab849a3aca9bf7fbfb21886) in parent group-v594445. 
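Before the CreateVM_Task that follows, the driver makes sure a "Project (<tenant id>)" folder exists under the compute folder and an "Instances" folder under it, and the VM is created inside the latter. A compact sketch of that idempotent two-level folder setup; find_child_folder and create_folder are hypothetical wrappers around the lookup and Folder.CreateFolder calls seen in the log:

```python
def find_child_folder(parent_ref, name):
    """Placeholder: look up an existing child folder by name, return None if absent."""
    raise NotImplementedError

def create_folder(parent_ref, name):
    """Placeholder for Folder.CreateFolder; returns the new folder reference."""
    raise NotImplementedError

def ensure_instance_folder(compute_folder_ref, project_id):
    """Return the Instances folder for a project, creating both levels if needed."""
    project_name = f"Project ({project_id})"
    project_ref = (find_child_folder(compute_folder_ref, project_name)
                   or create_folder(compute_folder_ref, project_name))
    return (find_child_folder(project_ref, "Instances")
            or create_folder(project_ref, "Instances"))
```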
[ 605.583056] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Creating folder: Instances. Parent ref: group-v594449. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.583056] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b4803b5-c39e-44ae-a68f-a7f6a1517f25 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.596023] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Created folder: Instances in parent group-v594449. [ 605.596023] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 605.596023] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 605.596023] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73ce03f2-0202-4d13-b722-18332e67055e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.628131] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.628131] env[68279]: value = "task-2962398" [ 605.628131] env[68279]: _type = "Task" [ 605.628131] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.637159] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962398, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.656376] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.700022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.700022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquired lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.700022] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.702157] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Downloaded image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk on the data store datastore1 {{(pid=68279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 605.703846] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 605.704116] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copying Virtual Disk [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk to [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.704771] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-f9cc2f93-1075-4d2d-a494-5412a1ada897 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.720033] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 605.720033] env[68279]: value = "task-2962399" [ 605.720033] env[68279]: _type = "Task" [ 605.720033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.732962] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962399, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.799912] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.141146] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962398, 'name': CreateVM_Task, 'duration_secs': 0.331111} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.145151] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 606.146019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.149584] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.149765] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 606.150947] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5f8d1eb-235f-44ea-95aa-85a9c26b7587 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.159872] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting 
for the task: (returnval){ [ 606.159872] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522f1aef-9979-7723-fb52-70e1242cbc2d" [ 606.159872] env[68279]: _type = "Task" [ 606.159872] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.166118] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 606.166317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.403s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.170783] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.207s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.172443] env[68279]: INFO nova.compute.claims [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.179170] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522f1aef-9979-7723-fb52-70e1242cbc2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.232078] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962399, 'name': CopyVirtualDisk_Task} progress is 100%. 
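The inventory payload in the scheduler report a few lines above encodes usable capacity per resource class as (total - reserved) * allocation_ratio, so this provider advertises 48 * 4.0 = 192 schedulable VCPUs, (196590 - 512) * 1.0 = 196078 MB of RAM, and 400 GB of disk. A quick check of that arithmetic using the numbers from the log; the capacity formula is the usual placement convention, stated here as background rather than quoted from this run:

```python
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```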
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.306505] env[68279]: DEBUG nova.network.neutron [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Updating instance_info_cache with network_info: [{"id": "6169f442-4572-4d81-9091-252e8a2afb74", "address": "fa:16:3e:c9:75:5e", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6169f442-45", "ovs_interfaceid": "6169f442-4572-4d81-9091-252e8a2afb74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.658181] env[68279]: DEBUG nova.compute.manager [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Received event network-changed-3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 606.658452] env[68279]: DEBUG nova.compute.manager [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Refreshing instance network info cache due to event network-changed-3824fbbc-b1fe-488c-a0d7-75f277ff669a. 
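The instance_info_cache entries above carry the full Neutron view of each port as a JSON list: port id, MAC, the network with its subnets, fixed IPs, MTU and the OVS binding details. A small sketch that pulls the fixed IPs and MAC out of one of those blobs; the dictionary shape and values follow the log output, trimmed to the fields used here:

```python
network_info = [{
    "id": "485b302b-3131-449a-ae46-edcfc4e6a588",
    "address": "fa:16:3e:d5:ed:0f",
    "network": {
        "id": "768d87d4-025b-491c-b5a6-9eaabd54f052",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap485b302b-31",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips)
    # tap485b302b-31 fa:16:3e:d5:ed:0f ['192.168.233.216']
```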
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 606.659133] env[68279]: DEBUG oslo_concurrency.lockutils [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Acquiring lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.659649] env[68279]: DEBUG oslo_concurrency.lockutils [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Acquired lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.659816] env[68279]: DEBUG nova.network.neutron [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Refreshing network info cache for port 3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.675781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.676050] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.676430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.737172] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962399, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688686} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.737662] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copied Virtual Disk [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk to [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.737921] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleting the datastore file [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.738648] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0689466f-3bb7-44ce-b732-a26fd6a78cee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.747064] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 606.747064] env[68279]: value = "task-2962400" [ 606.747064] env[68279]: _type = "Task" [ 606.747064] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.756660] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
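The CopyVirtualDisk_Task, DeleteDatastoreFile_Task and MoveDatastoreFile_Task sequence above is how the temporary download becomes the cached image: the sparse temp file is copied to the flat <image-id>.vmdk name, the temp copy is deleted, and the per-request vmware_temp/<uuid>/<image-id> directory is moved under devstack-image-cache_base. A compressed sketch of that hand-off; the helper names are placeholders for the datastore-file tasks in the log:

```python
def copy_virtual_disk(src, dst):
    """Placeholder for VirtualDiskManager.CopyVirtualDisk_Task."""
    raise NotImplementedError

def delete_datastore_file(path):
    """Placeholder for FileManager.DeleteDatastoreFile_Task."""
    raise NotImplementedError

def move_datastore_file(src, dst):
    """Placeholder for FileManager.MoveDatastoreFile_Task."""
    raise NotImplementedError

def publish_to_cache(tmp_dir, image_id, cache_dir="[datastore1] devstack-image-cache_base"):
    """Turn a finished download in vmware_temp into the shared cached image."""
    sparse = f"{tmp_dir}/{image_id}/tmp-sparse.vmdk"
    flat = f"{tmp_dir}/{image_id}/{image_id}.vmdk"
    copy_virtual_disk(sparse, flat)      # convert the sparse upload to the cached layout
    delete_datastore_file(sparse)        # drop the temporary sparse file
    move_datastore_file(f"{tmp_dir}/{image_id}", f"{cache_dir}/{image_id}")
```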
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.806625] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Releasing lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.806625] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Instance network_info: |[{"id": "6169f442-4572-4d81-9091-252e8a2afb74", "address": "fa:16:3e:c9:75:5e", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6169f442-45", "ovs_interfaceid": "6169f442-4572-4d81-9091-252e8a2afb74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.807611] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:75:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6169f442-4572-4d81-9091-252e8a2afb74', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.816802] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Creating folder: Project (e4242bed09e2465887b6fe1ebf06292b). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.818106] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9acbb9da-ad52-4ae5-a6bd-003728ad140c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.830021] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Created folder: Project (e4242bed09e2465887b6fe1ebf06292b) in parent group-v594445. [ 606.830021] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Creating folder: Instances. Parent ref: group-v594452. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.830021] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffb76a7b-e598-4d66-ac09-9fd36248053d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.839718] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Created folder: Instances in parent group-v594452. [ 606.839718] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.840090] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.840475] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a1aa0fb-789d-4edf-b6ef-08defd1863ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.867338] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.867338] env[68279]: value = "task-2962403" [ 606.867338] env[68279]: _type = "Task" [ 606.867338] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.878858] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962403, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.999751] env[68279]: DEBUG nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Successfully updated port: ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.240963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "6b778e98-12c2-42a5-a772-06ea32d090b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.241402] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.266040] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.266040] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.266040] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Moving file from [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9/01e502b7-2447-4972-9fe7-fd69f76ef71f to [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f. {{(pid=68279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 607.266040] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-d59e09ab-76e5-4ff8-9a29-f686f6a0fb5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.270308] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 607.270308] env[68279]: value = "task-2962404" [ 607.270308] env[68279]: _type = "Task" [ 607.270308] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.279333] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962404, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.334062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d93a58-b771-4712-bb79-950986039206 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.342891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b00565d-f42f-4f72-9570-86d7dcf30327 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.381707] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f88cc83-fc05-4c5c-9c25-1e9372a3a546 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.390445] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962403, 'name': CreateVM_Task, 'duration_secs': 0.348593} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.391978] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.392671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.392828] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.393244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 607.394103] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0693a069-4148-4f6e-8dfb-ac83c42761eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.400752] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461fb288-277b-4165-ad31-7e06284e113c {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.406976] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 607.406976] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5272684f-b33f-9395-624d-0cf48a8ca517" [ 607.406976] env[68279]: _type = "Task" [ 607.406976] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.418617] env[68279]: DEBUG nova.compute.provider_tree [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.425115] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5272684f-b33f-9395-624d-0cf48a8ca517, 'name': SearchDatastore_Task, 'duration_secs': 0.00793} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.425416] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.425639] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.427200] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.503167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.503167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.503167] env[68279]: DEBUG 
nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.743672] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 607.780724] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962404, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.022824} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.781255] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] File moved {{(pid=68279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 607.781255] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Cleaning up location [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 607.781419] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleting the datastore file [datastore1] vmware_temp/f0614814-5417-4aae-9015-4730e31369b9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.783541] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30c37b7f-00b4-4e0a-bd21-88adf69c8733 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.790019] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 607.790019] env[68279]: value = "task-2962405" [ 607.790019] env[68279]: _type = "Task" [ 607.790019] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.799465] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962405, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.922403] env[68279]: DEBUG nova.scheduler.client.report [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 608.061164] env[68279]: DEBUG nova.network.neutron [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Updated VIF entry in instance network info cache for port 3824fbbc-b1fe-488c-a0d7-75f277ff669a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 608.061164] env[68279]: DEBUG nova.network.neutron [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Updating instance_info_cache with network_info: [{"id": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "address": "fa:16:3e:09:e9:92", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3824fbbc-b1", "ovs_interfaceid": "3824fbbc-b1fe-488c-a0d7-75f277ff669a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.124416] env[68279]: DEBUG nova.compute.manager [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Received event network-changed-485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.124416] env[68279]: DEBUG nova.compute.manager [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Refreshing instance network info cache due to event network-changed-485b302b-3131-449a-ae46-edcfc4e6a588. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 608.124416] env[68279]: DEBUG oslo_concurrency.lockutils [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] Acquiring lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.124416] env[68279]: DEBUG oslo_concurrency.lockutils [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] Acquired lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.124416] env[68279]: DEBUG nova.network.neutron [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Refreshing network info cache for port 485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.272153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.299226] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02758} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.300134] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.300288] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76799367-6dd1-4866-909b-fb81050aecd6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.305680] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 608.305680] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526968bd-cc51-927f-10b1-dcbdf6274c71" [ 608.305680] env[68279]: _type = "Task" [ 608.305680] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.313409] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526968bd-cc51-927f-10b1-dcbdf6274c71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.344492] env[68279]: DEBUG nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.430112] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.430112] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.431197] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.967s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.434746] env[68279]: INFO nova.compute.claims [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.566948] env[68279]: DEBUG oslo_concurrency.lockutils [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Releasing lock "refresh_cache-8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.566948] env[68279]: DEBUG nova.compute.manager [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Received event network-vif-plugged-6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.566948] env[68279]: DEBUG oslo_concurrency.lockutils [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Acquiring lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.566948] env[68279]: DEBUG oslo_concurrency.lockutils [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.566948] env[68279]: DEBUG oslo_concurrency.lockutils 
[req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.567239] env[68279]: DEBUG nova.compute.manager [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] No waiting events found dispatching network-vif-plugged-6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 608.567239] env[68279]: WARNING nova.compute.manager [req-8f15864c-3d50-4ddf-b020-2fff50247ac8 req-8fb9b118-df65-4e6b-bc81-0c9a23a55691 service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Received unexpected event network-vif-plugged-6169f442-4572-4d81-9091-252e8a2afb74 for instance with vm_state building and task_state spawning. [ 608.815848] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526968bd-cc51-927f-10b1-dcbdf6274c71, 'name': SearchDatastore_Task, 'duration_secs': 0.024364} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.816153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.816264] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7/8782d86d-0e94-44b4-9595-b0eb2b2a3fb7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 608.816533] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.816715] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.816916] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-129e7503-14c5-4227-ba47-646204a5495c {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.818967] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1a078df-81ab-4dd3-8457-0690cb0f3a5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.825507] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 608.825507] env[68279]: value = "task-2962406" [ 608.825507] env[68279]: _type = "Task" [ 608.825507] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.830343] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.830343] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.830839] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20102b7d-d9e4-4c78-b0b7-7e2e2a4cbfda {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.835869] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.838849] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 608.838849] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d6c9d2-19f2-c0cb-2c9b-c5f6187f1dbd" [ 608.838849] env[68279]: _type = "Task" [ 608.838849] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.849777] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d6c9d2-19f2-c0cb-2c9b-c5f6187f1dbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.940907] env[68279]: DEBUG nova.compute.utils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 608.944164] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.944342] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.059756] env[68279]: DEBUG nova.network.neutron [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Updating instance_info_cache with network_info: [{"id": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "address": "fa:16:3e:a4:cc:3f", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab5d6d31-7d", "ovs_interfaceid": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.279667] env[68279]: DEBUG nova.policy [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5cd7e44689a40d993e5da3165332fd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '295e4a23df6e4d029636d514484434e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 609.329562] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a 
req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Received event network-changed-6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.329655] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Refreshing instance network info cache due to event network-changed-6169f442-4572-4d81-9091-252e8a2afb74. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 609.329805] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Acquiring lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.329938] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Acquired lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.330122] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Refreshing network info cache for port 6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.344602] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962406, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.356462] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d6c9d2-19f2-c0cb-2c9b-c5f6187f1dbd, 'name': SearchDatastore_Task, 'duration_secs': 0.011698} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.356462] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47f96f70-b4fc-4672-b7d3-987a3f696369 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.364514] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 609.364514] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526e1f0f-0002-c95c-028b-8ef64c65e290" [ 609.364514] env[68279]: _type = "Task" [ 609.364514] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.377149] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526e1f0f-0002-c95c-028b-8ef64c65e290, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.446363] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.550108] env[68279]: DEBUG nova.network.neutron [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Updated VIF entry in instance network info cache for port 485b302b-3131-449a-ae46-edcfc4e6a588. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 609.550467] env[68279]: DEBUG nova.network.neutron [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Updating instance_info_cache with network_info: [{"id": "485b302b-3131-449a-ae46-edcfc4e6a588", "address": "fa:16:3e:d5:ed:0f", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap485b302b-31", "ovs_interfaceid": "485b302b-3131-449a-ae46-edcfc4e6a588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.561992] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.562297] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance network_info: |[{"id": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "address": "fa:16:3e:a4:cc:3f", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", 
"bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab5d6d31-7d", "ovs_interfaceid": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 609.563105] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:cc:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab5d6d31-7d88-47ee-a53a-80e39c3e2a72', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.571015] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating folder: Project (295e4a23df6e4d029636d514484434e5). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 609.574386] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22632f80-ecfa-458e-afc6-ab0d3106ef64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.585423] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created folder: Project (295e4a23df6e4d029636d514484434e5) in parent group-v594445. [ 609.585537] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating folder: Instances. Parent ref: group-v594455. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 609.585802] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-226b5511-ac8b-4e9b-bdc5-67b144a68f4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.596155] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created folder: Instances in parent group-v594455. 
[ 609.596155] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 609.598189] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 609.598584] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae0f10eb-f5b6-42bd-b25a-d31667ed907e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.621902] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.621902] env[68279]: value = "task-2962409" [ 609.621902] env[68279]: _type = "Task" [ 609.621902] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.630812] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962409, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.639223] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08f6b11-6ddb-4607-a402-c178c627ce6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.649263] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbafbb2-db46-44ca-b7de-e926ec4b36bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.685691] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44133488-f886-4b37-a81b-508acddc9c13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.695794] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0a35bc-2c13-4671-81a8-285cdbb5d766 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.717761] env[68279]: DEBUG nova.compute.provider_tree [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.843906] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536018} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.843906] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7/8782d86d-0e94-44b4-9595-b0eb2b2a3fb7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.843906] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.843906] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1003fb37-71e7-4143-a852-d02e67dd7e44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.851725] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 609.851725] env[68279]: value = "task-2962410" [ 609.851725] env[68279]: _type = "Task" [ 609.851725] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.863337] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962410, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.875758] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526e1f0f-0002-c95c-028b-8ef64c65e290, 'name': SearchDatastore_Task, 'duration_secs': 0.012272} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.875758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.875758] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 3d3b5611-714f-4757-b848-891319c2fea3/3d3b5611-714f-4757-b848-891319c2fea3.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.875758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.875961] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.876077] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-716a5187-1dec-46c1-b20b-9eb1b30b76d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.878148] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4d6b9fb-3e7a-44f0-8486-4127b7ac0bd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.889758] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.889938] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.890781] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 609.890781] env[68279]: value = "task-2962411" [ 609.890781] env[68279]: _type = "Task" [ 609.890781] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.891040] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27562415-70db-4c30-be0b-a5e314686592 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.900111] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 609.900111] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f6d555-80f4-e33d-a29a-4e2f6b738e68" [ 609.900111] env[68279]: _type = "Task" [ 609.900111] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.906020] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962411, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.912197] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f6d555-80f4-e33d-a29a-4e2f6b738e68, 'name': SearchDatastore_Task, 'duration_secs': 0.008681} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.913290] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d5bae7-4b99-4288-af3e-abdc3f3e0c0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.919229] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 609.919229] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f36e6b-1a96-cac2-ee3c-1127b94667d7" [ 609.919229] env[68279]: _type = "Task" [ 609.919229] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.927952] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f36e6b-1a96-cac2-ee3c-1127b94667d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.056783] env[68279]: DEBUG oslo_concurrency.lockutils [req-6c654bb8-fc97-4f8b-b952-fbe1dbc2a5bf req-d9346ce9-40f0-42c8-af59-573ef6870d97 service nova] Releasing lock "refresh_cache-3d3b5611-714f-4757-b848-891319c2fea3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.135688] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962409, 'name': CreateVM_Task, 'duration_secs': 0.360314} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.135688] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 610.135688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.135688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.135688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 610.135688] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32be507e-2dc2-4980-8abf-b429d270c919 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.142197] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 610.142197] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52275c56-a133-97a9-d4b9-824873f5bc4f" [ 610.142197] env[68279]: _type = "Task" [ 610.142197] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.153008] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52275c56-a133-97a9-d4b9-824873f5bc4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.224310] env[68279]: DEBUG nova.scheduler.client.report [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.366030] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068391} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.366030] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.366030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3197f889-e4d6-4ae8-8fa3-df719cba939d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.391181] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7/8782d86d-0e94-44b4-9595-b0eb2b2a3fb7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.392566] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20234e57-157c-4afc-88c6-8013a4c9e0b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.418721] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962411, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49334} completed successfully. 
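Note: for an inventory record like the one reported above, Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. A quick check against the values logged for provider 40ba16cf-8244-4715-b8c1-975029462ee4:

    # Effective capacity implied by the inventory logged above:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400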
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.420149] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 3d3b5611-714f-4757-b848-891319c2fea3/3d3b5611-714f-4757-b848-891319c2fea3.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.420538] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.420976] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 610.420976] env[68279]: value = "task-2962412" [ 610.420976] env[68279]: _type = "Task" [ 610.420976] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.424017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a1ac5dd-c145-4da4-8ffb-b3555575cdc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.436236] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 610.436236] env[68279]: value = "task-2962413" [ 610.436236] env[68279]: _type = "Task" [ 610.436236] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.441923] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f36e6b-1a96-cac2-ee3c-1127b94667d7, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. 
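Note: the "Extending root virtual disk to 1048576" messages grow the copied vmdk to the flavor's root_gb; 1 GiB expressed in KiB is 1 * 1024 * 1024 = 1048576, the size in the log. A sketch of issuing that call through oslo.vmware; 'session' is a VMwareAPISession as in the earlier sketch, and the datacenter reference and path are placeholders (argument names follow the vSphere VirtualDiskManager API):

    # Sketch: grow the copied root vmdk to the flavor's root_gb, as logged above.
    root_gb = 1
    new_capacity_kb = root_gb * 1024 * 1024   # 1048576 KiB = 1 GiB

    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        name='[datastore1] <instance>/<instance>.vmdk',   # placeholder path
        datacenter=dc_ref,                                # placeholder Datacenter ref
        newCapacityKb=new_capacity_kb,
        eagerZero=False)
    session.wait_for_task(task)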
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.446505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.450025] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e65722bb-e39a-47e5-9aaf-87cfd27930d1/e65722bb-e39a-47e5-9aaf-87cfd27930d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.451069] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-447826bd-b5e1-4114-bd65-fe86350825c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.453891] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.460281] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.462455] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962413, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.464683] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 610.464683] env[68279]: value = "task-2962414" [ 610.464683] env[68279]: _type = "Task" [ 610.464683] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.475824] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962414, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.520523] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.520523] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.520523] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.520523] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.520852] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.520852] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.520852] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.520852] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.523773] env[68279]: DEBUG nova.virt.hardware [None 
req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.524129] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.524517] env[68279]: DEBUG nova.virt.hardware [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.531297] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9642a72f-bf70-4751-8f34-09748aaa67b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.551064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2685bd5f-c022-464d-96b9-e83daabce15c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.656558] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52275c56-a133-97a9-d4b9-824873f5bc4f, 'name': SearchDatastore_Task, 'duration_secs': 0.053688} completed successfully. 
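Note: the nova.virt.hardware lines above run the standard topology search; with no flavor or image limits, a 1-vCPU guest can only be laid out as 1 socket x 1 core x 1 thread, which is the single topology logged. A rough, hypothetical re-implementation of that enumeration (not Nova's actual code), for illustration:

    # Hypothetical illustration of the topology search logged above: list every
    # (sockets, cores, threads) split of the vCPU count within the given limits.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log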
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.656714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.656967] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.657288] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.657440] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.657645] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.657927] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3523517-c011-4c81-a049-b45739150c2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.670749] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.670944] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.674163] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a411e361-116b-48fa-b5f5-f4a5cc1dd397 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.683143] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 610.683143] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523e5d40-2e54-064f-7472-e39414e88db5" [ 610.683143] env[68279]: _type = "Task" [ 610.683143] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.697120] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523e5d40-2e54-064f-7472-e39414e88db5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.733126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.733126] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.735555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.463s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.737160] env[68279]: INFO nova.compute.claims [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.939139] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.951433] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074755} completed successfully. 
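Note: the Acquiring/Acquired/Releasing lock messages above serialize work on the cached image vmdk by using the datastore path itself as the oslo.concurrency lock name, so concurrent builds from the same image cannot race the "check cache, then copy" sequence. A minimal sketch of that locking pattern, with a placeholder cache path:

    # Sketch of the per-image serialization visible in the lock messages above.
    # The lock name is just a string; here it is a placeholder cache path.
    from oslo_concurrency import lockutils

    cache_vmdk = ('[datastore1] devstack-image-cache_base/'
                  '<image-id>/<image-id>.vmdk')   # placeholder, not a real path

    with lockutils.lock(cache_vmdk):
        # With the default in-process lock, only one greenthread per worker
        # gets past this point for a given lock name at a time.
        pass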
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.955151] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.956213] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52743cfc-925c-4379-9df7-af973585a127 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.979414] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 3d3b5611-714f-4757-b848-891319c2fea3/3d3b5611-714f-4757-b848-891319c2fea3.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.982516] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94216ab5-ad4e-4f41-b7e3-bf6f15eca76b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.005313] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505155} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.006606] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e65722bb-e39a-47e5-9aaf-87cfd27930d1/e65722bb-e39a-47e5-9aaf-87cfd27930d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.006823] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.007132] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 611.007132] env[68279]: value = "task-2962415" [ 611.007132] env[68279]: _type = "Task" [ 611.007132] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.007430] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6936649-399d-4d34-82a2-9ce75b531d19 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.017812] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962415, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.020270] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 611.020270] env[68279]: value = "task-2962416" [ 611.020270] env[68279]: _type = "Task" [ 611.020270] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.197858] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523e5d40-2e54-064f-7472-e39414e88db5, 'name': SearchDatastore_Task, 'duration_secs': 0.045478} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.198709] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3745e313-98fe-4ff0-8d29-9253898ff793 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.208675] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 611.208675] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52894895-62c2-5621-50cf-109b6fb05994" [ 611.208675] env[68279]: _type = "Task" [ 611.208675] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.217470] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52894895-62c2-5621-50cf-109b6fb05994, 'name': SearchDatastore_Task, 'duration_secs': 0.008194} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.217470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.217754] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.217901] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a844e62-b367-44e2-bc16-e79a545310a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.227175] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 611.227175] env[68279]: value = "task-2962417" [ 611.227175] env[68279]: _type = "Task" [ 611.227175] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.235811] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962417, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.241493] env[68279]: DEBUG nova.compute.utils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.244966] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 611.244966] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.437997] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962412, 'name': ReconfigVM_Task, 'duration_secs': 0.624302} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.438408] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7/8782d86d-0e94-44b4-9595-b0eb2b2a3fb7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.438973] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-322241ec-ea0f-4ea7-8f0a-2db48e4ef627 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.446288] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Updated VIF entry in instance network info cache for port 6169f442-4572-4d81-9091-252e8a2afb74. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.446888] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Updating instance_info_cache with network_info: [{"id": "6169f442-4572-4d81-9091-252e8a2afb74", "address": "fa:16:3e:c9:75:5e", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6169f442-45", "ovs_interfaceid": "6169f442-4572-4d81-9091-252e8a2afb74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.448485] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 611.448485] env[68279]: value = "task-2962418" [ 611.448485] env[68279]: _type = "Task" [ 611.448485] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.462449] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962418, 'name': Rename_Task} progress is 10%. 
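Note: the instance_info_cache update above is a list of VIF dicts; the fixed IP, MAC address and segmentation details sit a few levels down. A small, self-contained example of pulling those fields out of a structure shaped like the logged entry (the values below are copied from it):

    # Walk a network_info entry shaped like the cache update logged above.
    network_info = [{
        "id": "6169f442-4572-4d81-9091-252e8a2afb74",
        "address": "fa:16:3e:c9:75:5e",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.129", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "details": {"segmentation_id": 129},
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed_ips, vif["details"]["segmentation_id"])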
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.521953] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962415, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.534211] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962416, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077318} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.534559] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.535408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcad05e1-7e7e-4f4c-b60d-12bf704cec26 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.565413] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] e65722bb-e39a-47e5-9aaf-87cfd27930d1/e65722bb-e39a-47e5-9aaf-87cfd27930d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.567106] env[68279]: DEBUG nova.policy [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88074da303124c9db173ac0c253f5c27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36fa09849bed42f69be37a023b710523', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 611.569099] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6cc9ce6-c764-40df-9404-9fbdf653d39a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.594779] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 611.594779] env[68279]: value = "task-2962419" [ 611.594779] env[68279]: _type = "Task" [ 611.594779] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.606992] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962419, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.741444] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962417, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.745269] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.839111] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Successfully created port: 897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.887989] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edebf8bb-9e6b-4942-b28a-ad0b10948014 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.896445] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627d3725-7ced-4233-a3e2-dabe10000256 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.929297] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7452e4a2-ec13-416c-bd8d-7ca3425412a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.937717] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0c560e-c624-4ffb-bd55-df86030d9c84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.954553] env[68279]: DEBUG nova.compute.provider_tree [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.964224] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Releasing lock "refresh_cache-e65722bb-e39a-47e5-9aaf-87cfd27930d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.964224] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] 
[instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Received event network-vif-plugged-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.964224] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Acquiring lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.964224] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.964224] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.964530] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] No waiting events found dispatching network-vif-plugged-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 611.964530] env[68279]: WARNING nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Received unexpected event network-vif-plugged-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 for instance with vm_state building and task_state spawning. [ 611.964530] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Received event network-changed-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.964530] env[68279]: DEBUG nova.compute.manager [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Refreshing instance network info cache due to event network-changed-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 611.964530] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Acquiring lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.964678] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Acquired lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.964678] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Refreshing network info cache for port ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.970780] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962418, 'name': Rename_Task, 'duration_secs': 0.439125} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.971652] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.971904] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41a2a2fb-11d3-471b-ba0a-3ae28c0b877b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.979087] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 611.979087] env[68279]: value = "task-2962420" [ 611.979087] env[68279]: _type = "Task" [ 611.979087] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.986942] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962420, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.019479] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962415, 'name': ReconfigVM_Task, 'duration_secs': 0.654397} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.019659] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 3d3b5611-714f-4757-b848-891319c2fea3/3d3b5611-714f-4757-b848-891319c2fea3.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.020284] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbb2345d-3501-46e9-bc2c-fe30fdec06cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.027336] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 612.027336] env[68279]: value = "task-2962421" [ 612.027336] env[68279]: _type = "Task" [ 612.027336] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.035338] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962421, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.104356] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962419, 'name': ReconfigVM_Task, 'duration_secs': 0.368642} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.104660] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Reconfigured VM instance instance-00000003 to attach disk [datastore1] e65722bb-e39a-47e5-9aaf-87cfd27930d1/e65722bb-e39a-47e5-9aaf-87cfd27930d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.105291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78ea28b8-c4a5-46c6-88ef-1417ef745efe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.111980] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 612.111980] env[68279]: value = "task-2962422" [ 612.111980] env[68279]: _type = "Task" [ 612.111980] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.121461] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962422, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.236315] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622911} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.236613] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.236757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.237019] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7602b982-3009-4530-99e6-1e7bb3d3ee03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.244516] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 612.244516] env[68279]: value = "task-2962423" [ 612.244516] env[68279]: _type = "Task" [ 612.244516] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.256232] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962423, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.467096] env[68279]: DEBUG nova.scheduler.client.report [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.494391] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962420, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.542769] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962421, 'name': Rename_Task, 'duration_secs': 0.134774} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.544609] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.544609] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c381c1bc-824b-4793-b33e-84be45e5296e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.555065] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 612.555065] env[68279]: value = "task-2962424" [ 612.555065] env[68279]: _type = "Task" [ 612.555065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.565938] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962424, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.623048] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962422, 'name': Rename_Task, 'duration_secs': 0.141847} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.625247] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.625618] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b559c3a3-b2e3-4f42-8816-7537106cd25e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.632565] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 612.632565] env[68279]: value = "task-2962425" [ 612.632565] env[68279]: _type = "Task" [ 612.632565] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.643896] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962425, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.764537] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.768535] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072489} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.769370] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.770471] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f7013f-df22-4627-a61f-625f3e499244 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.801098] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.803785] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71e8da71-bec7-4cd7-8368-71e7a3c02856 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.829436] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 612.829436] env[68279]: value = "task-2962426" [ 612.829436] env[68279]: _type = "Task" [ 612.829436] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.832428] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.832907] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.834358] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.834571] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.834721] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.834873] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.835620] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 612.835620] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.835620] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f 
tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.835620] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.835840] env[68279]: DEBUG nova.virt.hardware [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.838806] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8279bcec-4724-4a0e-bf88-0b5ba8a82965 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.854469] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e024c798-15e6-4d37-8c72-9db6c4c8b8d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.859702] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962426, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.975152] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.975508] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 612.996061] env[68279]: DEBUG oslo_vmware.api [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962420, 'name': PowerOnVM_Task, 'duration_secs': 0.999553} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.996061] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.996061] env[68279]: INFO nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Took 15.25 seconds to spawn the instance on the hypervisor. [ 612.996388] env[68279]: DEBUG nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.997688] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a642f282-a76e-4a6a-85bd-9fc8961526c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.068483] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962424, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.150718] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.354380] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962426, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.485601] env[68279]: DEBUG nova.compute.utils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 613.485601] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.485601] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.522501] env[68279]: INFO nova.compute.manager [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Took 20.01 seconds to build instance. [ 613.571664] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962424, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.658038] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.825645] env[68279]: DEBUG nova.policy [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f72f0e3628e1438a80840cdf82642554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7bf4e6f720045e1854859d2966a887b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 613.859296] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962426, 'name': ReconfigVM_Task, 'duration_secs': 0.818189} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.859296] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.859296] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-475a5069-cccc-4831-9ae5-3489fee92a34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.861980] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Updated VIF entry in instance network info cache for port ab5d6d31-7d88-47ee-a53a-80e39c3e2a72. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 613.862436] env[68279]: DEBUG nova.network.neutron [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Updating instance_info_cache with network_info: [{"id": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "address": "fa:16:3e:a4:cc:3f", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab5d6d31-7d", "ovs_interfaceid": "ab5d6d31-7d88-47ee-a53a-80e39c3e2a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.865906] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 613.865906] env[68279]: value = "task-2962427" [ 613.865906] env[68279]: _type = "Task" [ 613.865906] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.879859] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962427, 'name': Rename_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.923353] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "239d0522-5101-49e0-8d3b-85b54927cd21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.923604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.989172] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 614.026870] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee82072-aafb-4665-8ac1-cd52d200d71d tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.529s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.069154] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962424, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.091014] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Successfully created port: 16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.161895] env[68279]: DEBUG oslo_vmware.api [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962425, 'name': PowerOnVM_Task, 'duration_secs': 1.245867} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.162897] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.162897] env[68279]: INFO nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Took 11.99 seconds to spawn the instance on the hypervisor. [ 614.162897] env[68279]: DEBUG nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.164936] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ca281e-d1fe-40a2-9209-40e6dffd125c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.367621] env[68279]: DEBUG oslo_concurrency.lockutils [req-394c71f7-2087-48e8-b3cb-749c7c8c2a5a req-128d554c-d595-42ef-bceb-5db58d995b7d service nova] Releasing lock "refresh_cache-50e08259-7915-49bb-b137-5cc6e9d53c16" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.377269] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962427, 'name': Rename_Task, 'duration_secs': 0.258404} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.377698] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.377963] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-276e08f6-24aa-4613-b77c-713443f2e654 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.384767] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 614.384767] env[68279]: value = "task-2962428" [ 614.384767] env[68279]: _type = "Task" [ 614.384767] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.395031] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962428, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.426448] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 614.570379] env[68279]: DEBUG oslo_vmware.api [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962424, 'name': PowerOnVM_Task, 'duration_secs': 1.567211} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.570766] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.570766] env[68279]: INFO nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Took 14.62 seconds to spawn the instance on the hypervisor. [ 614.571183] env[68279]: DEBUG nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.572408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5c4418-1f14-42cf-9d4a-a7c215533090 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.693459] env[68279]: INFO nova.compute.manager [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Took 18.48 seconds to build instance. [ 614.899988] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962428, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.965780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.965780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.968159] env[68279]: INFO nova.compute.claims [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.009599] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 615.056214] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 615.056527] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.056719] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 615.056937] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Flavor pref 0:0:0 
{{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.057135] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 615.057426] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 615.057763] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 615.057763] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 615.057941] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 615.058306] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 615.058387] env[68279]: DEBUG nova.virt.hardware [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 615.059494] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64910e8-47bc-4acb-b512-e47a62b2f65d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.069476] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dc06c5-2870-45ad-9398-1ef4e00f6b2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.095421] env[68279]: INFO nova.compute.manager [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Took 21.58 seconds to build instance. 
[ 615.196565] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fed37c2c-71e2-48e8-900c-6e59f7336b98 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.273800] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Successfully created port: e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.403348] env[68279]: DEBUG oslo_vmware.api [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962428, 'name': PowerOnVM_Task, 'duration_secs': 0.524377} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.403938] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.404252] env[68279]: INFO nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Took 11.04 seconds to spawn the instance on the hypervisor. 
[ 615.404989] env[68279]: DEBUG nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.407852] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7187d7-fdef-4039-bd8e-c3d4fd4e78fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.580712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "b869231a-5293-433f-ac7c-d50030368826" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.580960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.598809] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60b4e904-a432-4494-88b8-19d28a556b49 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.086s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.930407] env[68279]: INFO nova.compute.manager [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Took 15.86 seconds to build instance. [ 616.083790] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.166069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e4242d-ff7e-4040-b55f-2babcfae7c0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.175850] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac86e447-7d40-4b7c-a5ef-bdb0526037c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.214283] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ad4cb9-d32d-4c81-be9a-c5e07106a89e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.222110] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd9968e-4001-470f-ad67-d0c242c97aa5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.238048] env[68279]: DEBUG nova.compute.provider_tree [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.434648] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52350b53-06bb-4526-a466-47a48abd3c41 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.382s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.629306] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.689937] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Successfully updated port: 897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 616.742259] env[68279]: DEBUG nova.scheduler.client.report [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.196724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.196724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.196724] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.247910] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.248519] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 617.251462] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.622s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.253565] env[68279]: INFO nova.compute.claims [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.443063] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.443325] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.757837] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.760106] env[68279]: DEBUG nova.compute.utils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 617.769664] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Not allocating networking since 'none' was specified. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 617.948500] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 618.086608] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Successfully updated port: 16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.222833] env[68279]: DEBUG nova.network.neutron [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Updating instance_info_cache with network_info: [{"id": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "address": "fa:16:3e:d1:32:2b", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap897f9e70-e2", "ovs_interfaceid": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.269864] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.398808] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "4c99c929-9fda-42f0-9327-0508ad3e6150" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.401847] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.437251] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.437425] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.471355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.507727] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616a7413-1cb3-4934-8336-8892366b7a61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.519545] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4678c0f-b529-48e6-8f3a-201c58b543c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.556683] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8447ab7-2a6b-42fa-80b1-da2b2047ed28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.565335] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a861732d-82dc-418b-b8cb-51ec8c74eaa7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.581993] 
env[68279]: DEBUG nova.compute.provider_tree [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.588093] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.588193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.588446] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.615245] env[68279]: DEBUG nova.compute.manager [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Received event network-vif-plugged-897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.615487] env[68279]: DEBUG oslo_concurrency.lockutils [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] Acquiring lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.615712] env[68279]: DEBUG oslo_concurrency.lockutils [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.615929] env[68279]: DEBUG oslo_concurrency.lockutils [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.616119] env[68279]: DEBUG nova.compute.manager [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] No waiting events found dispatching network-vif-plugged-897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 618.616275] env[68279]: WARNING nova.compute.manager [req-38baa7e1-d9df-4b0e-b278-6f26b5b7faf7 
req-0598025d-d6cf-4267-86e9-9fe83147a1e4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Received unexpected event network-vif-plugged-897f9e70-e215-4b51-8dec-f0e2b05f7b12 for instance with vm_state building and task_state spawning. [ 618.725444] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.725749] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Instance network_info: |[{"id": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "address": "fa:16:3e:d1:32:2b", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap897f9e70-e2", "ovs_interfaceid": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 618.726192] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:32:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '897f9e70-e215-4b51-8dec-f0e2b05f7b12', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.736341] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 618.737168] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 618.737168] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d55dcb38-a395-4ae6-920d-b2c460cfb6e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.763576] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.763576] env[68279]: value = "task-2962429" [ 618.763576] env[68279]: _type = "Task" [ 618.763576] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.774997] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.889637] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Successfully updated port: e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.906033] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 618.939745] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 619.085366] env[68279]: DEBUG nova.scheduler.client.report [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.192052] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.279123] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.284815] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.332894] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.332894] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.332894] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.333138] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.333138] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.333138] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.333138] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.333138] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 619.333292] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.333836] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.334028] env[68279]: DEBUG nova.virt.hardware [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.337251] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61005cc5-40c2-4b05-b302-070c9f0a0460 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.347837] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f403b2b-c00c-416a-a04c-1950e77656c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.363930] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.371298] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Creating folder: Project (cdf3f7dab8514e528f713d80396f4c84). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.372558] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Acquiring lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.372558] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.372630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Acquiring lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.372808] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.372973] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.374502] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19400c33-e3dd-4973-9f1f-2451b9765596 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.380380] env[68279]: INFO nova.compute.manager [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Terminating instance [ 619.393302] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.393446] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock 
"refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.394416] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.395596] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Created folder: Project (cdf3f7dab8514e528f713d80396f4c84) in parent group-v594445. [ 619.401269] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Creating folder: Instances. Parent ref: group-v594459. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.401269] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37a13bba-b1f9-46c6-b057-be4254a3c096 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.410894] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Created folder: Instances in parent group-v594459. [ 619.412189] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.413267] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.413600] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-068b0473-2104-43cd-aec3-baff36874c54 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.435968] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.435968] env[68279]: value = "task-2962432" [ 619.435968] env[68279]: _type = "Task" [ 619.435968] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.450349] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962432, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.456731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.469669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.563168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.563168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.594717] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.594805] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.597829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.127s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.603835] env[68279]: INFO nova.compute.claims [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.774923] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.885968] env[68279]: DEBUG nova.compute.manager [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 619.885968] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.886611] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525dbcd3-6bcd-4e92-87f5-893e7043e419 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.895405] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.895500] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9571528e-0d95-4f3c-8dbf-a3c5f0e2d7f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.906033] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Waiting for the task: (returnval){ [ 619.906033] env[68279]: value = "task-2962433" [ 619.906033] env[68279]: _type = "Task" [ 619.906033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.918051] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Task: {'id': task-2962433, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.951448] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962432, 'name': CreateVM_Task, 'duration_secs': 0.398943} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.951635] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.952602] env[68279]: DEBUG oslo_vmware.service [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bbc48b-568b-4bdd-a78e-4b84a5d1b8d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.961039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.961157] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.961470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 619.961722] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db3270a4-a703-4164-836f-1207f08a9533 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.968041] env[68279]: DEBUG nova.compute.manager [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Received event network-vif-plugged-16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.968041] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.968531] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.969564] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.969564] env[68279]: DEBUG nova.compute.manager [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] No waiting events found dispatching network-vif-plugged-16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.970133] env[68279]: WARNING nova.compute.manager [req-d4b7a623-9fbd-4d4b-ab3c-b5d44b3c5304 req-d0f39602-a454-4d22-a492-72fd6a97b059 service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Received unexpected event network-vif-plugged-16b424ba-6749-431c-bdc5-22c910ad0fe6 for instance with vm_state building and task_state spawning. [ 619.971613] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 619.971613] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297f816-d7c4-6f51-976a-2e15378aa3a2" [ 619.971613] env[68279]: _type = "Task" [ 619.971613] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.985465] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297f816-d7c4-6f51-976a-2e15378aa3a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.995743] env[68279]: DEBUG nova.network.neutron [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.027514] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.069198] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 620.111439] env[68279]: DEBUG nova.compute.utils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.113806] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.116612] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.273345] env[68279]: DEBUG nova.policy [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40f33a380ca4439f94675577a0988927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fa34d494002422bb2a3bc28b641cdfd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.283406] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.420806] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Task: {'id': task-2962433, 'name': PowerOffVM_Task, 'duration_secs': 0.217387} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.421134] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 620.421268] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 620.421582] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-140f2e80-d536-4f26-ad27-eb7c7cacb6f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.481962] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 620.482296] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 620.482753] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Deleting the datastore file [datastore1] 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 620.483814] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36a8f88e-1561-40a5-a3c3-58f7d2572118 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.493016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.493016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.493016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.493016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.493235] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.493906] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1abc5ad-57ab-4b02-a2f0-230ecf35b3bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.498404] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.498749] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 
tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Instance network_info: |[{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 620.499082] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Waiting for the task: (returnval){ [ 620.499082] env[68279]: value = "task-2962435" [ 620.499082] env[68279]: _type = "Task" [ 620.499082] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.500092] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:83:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16b424ba-6749-431c-bdc5-22c910ad0fe6', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.507955] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Creating folder: Project (36fa09849bed42f69be37a023b710523). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.512655] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa5076c4-bd4b-4c03-960d-09cf67c99a61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.518649] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.518649] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.518649] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23b54ee-ad41-4445-a20f-3ef8345c7b4c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.527816] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Task: {'id': task-2962435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.533365] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-115ef484-a9b6-4eb3-a3a8-999399ec66bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.534420] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Created folder: Project (36fa09849bed42f69be37a023b710523) in parent group-v594445. [ 620.537773] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Creating folder: Instances. Parent ref: group-v594462. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.538143] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c35e2bb9-1c4c-4417-93a1-44ffa23dc35e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.543687] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 620.543687] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ba1f7-e05d-4cfb-37d4-1915e3dd808f" [ 620.543687] env[68279]: _type = "Task" [ 620.543687] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.549357] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Created folder: Instances in parent group-v594462. [ 620.551658] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.551658] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.551658] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f80245a0-758f-4eec-9b06-a086289cd729 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.573851] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ba1f7-e05d-4cfb-37d4-1915e3dd808f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.577272] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.577272] env[68279]: value = "task-2962438" [ 620.577272] env[68279]: _type = "Task" [ 620.577272] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.588913] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962438, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.603276] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.614024] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.702922] env[68279]: DEBUG nova.network.neutron [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [{"id": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "address": "fa:16:3e:05:7a:8e", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape402e4c3-f9", "ovs_interfaceid": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.781090] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.863201] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb2e327-687e-4830-b084-948986bc3b48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.872718] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2f46a5-5ca9-4014-bcf2-688735835109 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.911870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353e757a-d940-43cc-9a94-71c850e3e33b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.922019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36eeaa5-d28d-4577-8dd6-26c13ff55dbd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.934236] env[68279]: DEBUG nova.compute.provider_tree [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.018657] env[68279]: DEBUG oslo_vmware.api [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] 
Task: {'id': task-2962435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155297} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.018657] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 621.018848] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 621.018995] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 621.019224] env[68279]: INFO nova.compute.manager [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 621.020628] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 621.020628] env[68279]: DEBUG nova.compute.manager [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 621.020628] env[68279]: DEBUG nova.network.neutron [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 621.054180] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 621.054456] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Creating directory with path [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.054696] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da0fba6b-a900-454e-8a94-52b9586cf86b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.067449] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Created directory with path [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.067666] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Fetch image to [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 621.067838] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Downloading image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk on the data store datastore2 {{(pid=68279) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 621.068773] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9849dc-813d-4ea2-948f-634a65a5b424 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.077473] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3b7382-7e2c-4fd0-816d-9dca11391eca {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.093859] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3219b98c-785f-46e6-ae6d-0a521fdaf727 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.098315] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962438, 'name': CreateVM_Task, 'duration_secs': 0.310717} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.098482] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 621.099530] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.099763] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.099979] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 621.100255] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-879f4a91-2fa1-4a33-85bc-0c40e50c2562 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.132175] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48705b4-ee31-4705-921a-9b4fb72d4281 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.136379] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 621.136379] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e71781-d501-4266-9586-a7650d4c42d5" [ 621.136379] env[68279]: _type = "Task" [ 621.136379] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.144236] env[68279]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cff3e15a-069c-404e-9e8a-91c48364bd2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.150143] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.150392] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.150604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.176326] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Downloading image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to the data store datastore2 {{(pid=68279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 621.204853] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.205213] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Instance network_info: |[{"id": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "address": "fa:16:3e:05:7a:8e", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": 
"nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape402e4c3-f9", "ovs_interfaceid": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.205665] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:7a:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e402e4c3-f9cc-469b-a10e-86b1f89eddad', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.214499] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Creating folder: Project (c7bf4e6f720045e1854859d2966a887b). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.216930] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f905be13-0bc5-443c-b52c-3cc3c52ffd06 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.228307] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Created folder: Project (c7bf4e6f720045e1854859d2966a887b) in parent group-v594445. [ 621.228537] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Creating folder: Instances. Parent ref: group-v594465. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.232469] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0f7b717-7945-4d73-9a92-d8870fba524f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.243922] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Created folder: Instances in parent group-v594465. [ 621.243922] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 621.243922] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.243922] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72de34c3-caf4-4a4f-b300-fe54ad322ebf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.266936] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.266936] env[68279]: value = "task-2962441" [ 621.266936] env[68279]: _type = "Task" [ 621.266936] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.278134] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962441, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.281961] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.287725] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 621.345458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.345596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.345794] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.345962] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.346134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.348932] env[68279]: INFO nova.compute.manager [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Terminating instance [ 621.439544] env[68279]: DEBUG nova.scheduler.client.report [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
621.575013] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Successfully created port: 263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.637823] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 621.671889] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 621.672783] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.673221] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 621.674590] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.674590] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 621.674763] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 621.674875] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 621.675363] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 621.675363] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 621.675363] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 621.675646] env[68279]: DEBUG nova.virt.hardware [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 621.677494] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01afcdb3-2d89-4bb5-98db-54f6b410eedf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.686919] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9e66bc-0b93-4f43-aa47-49b7a2b5bbf5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.781118] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962441, 'name': CreateVM_Task, 'duration_secs': 0.373981} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.787297] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 621.787515] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962429, 'name': CreateVM_Task, 'duration_secs': 2.640197} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.788271] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.788538] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.788715] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 621.788967] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 621.789379] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17b828b6-cd8b-42eb-a9dc-c3d676339716 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.791507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.796257] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 621.796257] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525cba03-cae0-1356-6b4f-96dc09efda8e" [ 621.796257] env[68279]: _type = "Task" [ 621.796257] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.806033] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525cba03-cae0-1356-6b4f-96dc09efda8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.856150] env[68279]: DEBUG nova.compute.manager [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 621.856441] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 621.857340] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d68644a-2b05-459f-9479-94e0efff3612 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.865768] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 621.866053] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6c5cba0-743c-46ee-80bb-e12fc49e007f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.874048] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 621.874048] env[68279]: value = "task-2962442" [ 621.874048] env[68279]: _type = "Task" [ 621.874048] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.885784] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.946352] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.946972] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.952034] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.495s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.956046] env[68279]: INFO nova.compute.claims [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.095408] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 622.095408] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 622.155122] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Downloaded image file data 01e502b7-2447-4972-9fe7-fd69f76ef71f to vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk on the data store datastore2 {{(pid=68279) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 622.156927] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 622.157194] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Copying Virtual Disk [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk to [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.157497] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d607dee7-e091-44df-b633-36059b2d7306 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.170899] env[68279]: DEBUG oslo_vmware.api [None 
req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 622.170899] env[68279]: value = "task-2962443" [ 622.170899] env[68279]: _type = "Task" [ 622.170899] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.179434] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.309804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.310954] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.310954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.310954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.310954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.310954] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f60e4bd-6057-4d1f-bce0-407e265fa87c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.316707] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 622.316707] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f97a2c-87a3-1a10-dbe3-f02244209356" [ 622.316707] env[68279]: _type = "Task" [ 622.316707] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.329475] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f97a2c-87a3-1a10-dbe3-f02244209356, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.386567] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962442, 'name': PowerOffVM_Task, 'duration_secs': 0.280156} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.386567] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 622.386764] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 622.386932] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7067104e-d16b-4b7e-a65f-468a15a7508c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.461474] env[68279]: DEBUG nova.compute.utils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.465049] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 622.465227] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.469079] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 622.469079] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 622.469547] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Deleting the datastore file [datastore1] e65722bb-e39a-47e5-9aaf-87cfd27930d1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 622.469867] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb6f8b52-9b08-47f3-b66f-9ed67361a92d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.479345] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for the task: (returnval){ [ 622.479345] env[68279]: value = "task-2962445" [ 622.479345] env[68279]: _type = "Task" [ 622.479345] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.488655] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.621904] env[68279]: DEBUG nova.compute.manager [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Received event network-changed-897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 622.622261] env[68279]: DEBUG nova.compute.manager [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Refreshing instance network info cache due to event network-changed-897f9e70-e215-4b51-8dec-f0e2b05f7b12. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 622.622671] env[68279]: DEBUG oslo_concurrency.lockutils [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] Acquiring lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.622738] env[68279]: DEBUG oslo_concurrency.lockutils [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] Acquired lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.622980] env[68279]: DEBUG nova.network.neutron [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Refreshing network info cache for port 897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.650077] env[68279]: DEBUG nova.policy [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a96c614631284138bb9ad558099db092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0703802e5fb4caf939a345174f379be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.674134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.674134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.690876] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962443, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.718504] env[68279]: DEBUG nova.network.neutron [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.830507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.830507] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.830507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.967385] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.997398] env[68279]: DEBUG oslo_vmware.api [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Task: {'id': task-2962445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251212} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.997398] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 622.997398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 622.997398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 622.997398] env[68279]: INFO nova.compute.manager [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 622.998759] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 622.998759] env[68279]: DEBUG nova.compute.manager [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 622.998759] env[68279]: DEBUG nova.network.neutron [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.185412] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.189065] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.854577} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.192303] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Copied Virtual Disk [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk to [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.192527] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleting the datastore file [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f/tmp-sparse.vmdk {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.192980] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33e4d00a-2c8c-461e-b146-9d2c53a8cd8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.201351] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 623.201351] env[68279]: value = "task-2962446" [ 623.201351] env[68279]: _type = "Task" [ 623.201351] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.211432] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.221387] env[68279]: INFO nova.compute.manager [-] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Took 2.20 seconds to deallocate network for instance. 
[ 623.253667] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1954485a-e0d8-42fb-a957-dc8db6db6cac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.265024] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29adab1-9467-42c5-a90b-16bee5aa32a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.294977] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f6a53c-47b6-4422-9e91-cfd3cc20df62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.303801] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b3ba2c-8e72-4386-8cc0-95fab425c5cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.313385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.313986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.326395] env[68279]: DEBUG nova.compute.provider_tree [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.715288] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030258} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.715708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.715972] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.718038] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Moving file from [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302/01e502b7-2447-4972-9fe7-fd69f76ef71f to [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f. {{(pid=68279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 623.718038] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-0bf13e14-f252-4e0b-91c2-933c6ad2b4f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.725083] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 623.725083] env[68279]: value = "task-2962447" [ 623.725083] env[68279]: _type = "Task" [ 623.725083] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.734755] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.735183] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962447, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.831074] env[68279]: DEBUG nova.scheduler.client.report [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.987623] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.029885] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.029885] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.030067] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.030195] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.030332] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 624.030548] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.030776] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.030937] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.031114] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.031279] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.031611] env[68279]: DEBUG nova.virt.hardware [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.032512] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1840cf-bbb1-40f5-b977-c21ddab3a353 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.045274] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13693c8-b8ff-4368-979e-664452b5ec66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.214015] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Received event network-changed-16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.218141] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Refreshing instance network info cache due to event network-changed-16b424ba-6749-431c-bdc5-22c910ad0fe6. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 624.218395] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Acquiring lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.218535] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Acquired lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.218755] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Refreshing network info cache for port 16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.241451] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962447, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026653} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.241451] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] File moved {{(pid=68279) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 624.241451] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Cleaning up location [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 624.241569] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleting the datastore file [datastore2] vmware_temp/52f021c1-04fb-4832-93bb-79eef4f56302 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.241700] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d21729d-6406-4c04-bc35-c3511d35eff8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.245421] env[68279]: DEBUG nova.network.neutron [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Updated VIF entry in instance network info cache for port 897f9e70-e215-4b51-8dec-f0e2b05f7b12. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 624.245733] env[68279]: DEBUG nova.network.neutron [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Updating instance_info_cache with network_info: [{"id": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "address": "fa:16:3e:d1:32:2b", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap897f9e70-e2", "ovs_interfaceid": "897f9e70-e215-4b51-8dec-f0e2b05f7b12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.255626] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 624.255626] env[68279]: value = "task-2962448" [ 624.255626] env[68279]: _type = "Task" [ 624.255626] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.270495] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.337228] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.337688] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 624.340633] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.871s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.342383] env[68279]: INFO nova.compute.claims [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.583475] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Successfully created port: 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.752078] env[68279]: DEBUG oslo_concurrency.lockutils [req-8724eb63-eacb-45f3-967d-8cbee3e1baea req-7154dc8b-1055-4a29-aead-e3b6cac7c2c4 service nova] Releasing lock "refresh_cache-1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.766829] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027742} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.767152] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.768019] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4acc97c4-43ac-4c8a-bac0-759999208d7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.777995] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 624.777995] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a99daf-f655-fb5a-3a04-fe99c97074c7" [ 624.777995] env[68279]: _type = "Task" [ 624.777995] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.785943] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a99daf-f655-fb5a-3a04-fe99c97074c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.841300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "3d3b5611-714f-4757-b848-891319c2fea3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.841554] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.841756] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "3d3b5611-714f-4757-b848-891319c2fea3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.842645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.842645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.844130] env[68279]: INFO nova.compute.manager [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Terminating instance [ 624.852664] env[68279]: DEBUG nova.compute.utils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.856603] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 624.856795] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.948775] env[68279]: DEBUG nova.network.neutron [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.216248] env[68279]: DEBUG nova.policy [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23c0491cf34b4ed1ae12a88d192a217b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c1f31d38eb648e1ad8f5355e5a90a06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 625.296164] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a99daf-f655-fb5a-3a04-fe99c97074c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008678} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.296539] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.296616] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 239d0522-5101-49e0-8d3b-85b54927cd21/239d0522-5101-49e0-8d3b-85b54927cd21.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.296862] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.297361] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.297543] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5acdd934-41b2-41f8-a680-61c576a276b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.299943] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0573928-95ae-4ac9-bc18-45baa2e61749 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.307335] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 625.307335] env[68279]: value = "task-2962449" [ 625.307335] env[68279]: _type = "Task" [ 625.307335] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.312352] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.312532] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 625.314226] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-676b0888-5105-4536-afac-43c13f1d9a44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.321623] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.327065] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 625.327065] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522426d7-84e9-b0ba-f43a-d2010d58932c" [ 625.327065] env[68279]: _type = "Task" [ 625.327065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.334233] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522426d7-84e9-b0ba-f43a-d2010d58932c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.357825] env[68279]: DEBUG nova.compute.manager [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 625.358186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 625.363937] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139d7175-bd71-457b-bea0-efb7aa36bdd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.377749] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 625.379936] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 625.380777] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2032f69-900b-4e56-831e-3c3f9f3b1e10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.392176] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 625.392176] env[68279]: value = "task-2962450" [ 625.392176] env[68279]: _type = "Task" [ 625.392176] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.408191] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.451104] env[68279]: INFO nova.compute.manager [-] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Took 2.46 seconds to deallocate network for instance. [ 625.677344] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a27af5-46e5-4fc0-94bb-9b920f3b4bd9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.686145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02b4de1-67e3-49d4-a8d2-105d6d387132 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.718899] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087b4aa0-6bd5-4ecb-9bac-fcdaa0c5f3c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.726939] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52390dbe-66ec-41b1-99b2-9202a14fcfb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.742155] env[68279]: DEBUG nova.compute.provider_tree [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.818322] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962449, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476438} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.820932] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 239d0522-5101-49e0-8d3b-85b54927cd21/239d0522-5101-49e0-8d3b-85b54927cd21.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 625.821218] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 625.821491] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cad7f429-0756-40f1-844b-e8c6369acc09 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.830897] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 625.830897] env[68279]: value = "task-2962451" [ 625.830897] env[68279]: _type = "Task" [ 625.830897] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.840337] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522426d7-84e9-b0ba-f43a-d2010d58932c, 'name': SearchDatastore_Task, 'duration_secs': 0.010075} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.841700] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d215d2ca-96c3-4877-8744-b1c8bd6396fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.850024] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.850881] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 625.850881] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5204b0ad-8dab-51ea-5dfc-a9fe296b996a" [ 625.850881] env[68279]: _type = "Task" [ 625.850881] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.861031] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5204b0ad-8dab-51ea-5dfc-a9fe296b996a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.904582] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962450, 'name': PowerOffVM_Task, 'duration_secs': 0.238538} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.905199] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 625.905199] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 625.906171] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b76fb398-53ac-484c-b501-4b8dded719f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.908732] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Successfully updated port: 263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 625.960465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.977888] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 625.978128] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 625.978322] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Deleting the datastore file [datastore1] 3d3b5611-714f-4757-b848-891319c2fea3 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 625.978581] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27ffb8d0-104f-48c0-803e-c92d88e8f079 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.985373] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for the task: (returnval){ [ 625.985373] env[68279]: value = "task-2962453" [ 625.985373] env[68279]: _type = "Task" [ 625.985373] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.998258] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.028260] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updated VIF entry in instance network info cache for port 16b424ba-6749-431c-bdc5-22c910ad0fe6. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.028260] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.249146] env[68279]: DEBUG nova.scheduler.client.report [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed for provider 
40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.343980] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065265} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.345233] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.348813] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd804a37-8840-4336-9481-3ea4da119d95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.370804] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 239d0522-5101-49e0-8d3b-85b54927cd21/239d0522-5101-49e0-8d3b-85b54927cd21.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.374118] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fc3b315-a1fb-4347-950a-31420d3bd566 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.393953] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5204b0ad-8dab-51ea-5dfc-a9fe296b996a, 'name': SearchDatastore_Task, 'duration_secs': 0.008579} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.397735] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 626.400031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.400315] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.400634] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 626.400634] env[68279]: value = "task-2962454" [ 626.400634] env[68279]: _type = "Task" [ 626.400634] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.401221] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.401450] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.401671] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2e3dcb5-d544-4bd2-98e6-f7045395a6c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.404335] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bae631c-d1fd-4319-851c-40f2652e11f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.417769] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.418119] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquired lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.418119] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.419749] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.422156] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 626.422156] env[68279]: value = "task-2962455" [ 626.422156] env[68279]: _type = "Task" [ 626.422156] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.422408] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.422578] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.423625] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be7ee14-4820-428d-98f5-c2d1f37a0670 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.432827] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 626.432827] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524236bf-78e0-31d6-6c11-8750be0347f8" [ 626.432827] env[68279]: _type = "Task" [ 626.432827] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.439422] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.451768] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524236bf-78e0-31d6-6c11-8750be0347f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.454275] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.454580] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.454700] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.454874] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.455027] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.455173] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.455396] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.455549] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
626.455761] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.455892] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.456103] env[68279]: DEBUG nova.virt.hardware [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.456999] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efca515-b5ac-42f6-8069-781f37e22e5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.465176] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d012bf-0cf1-4db1-a5a7-fb3dedd273b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.494625] env[68279]: DEBUG oslo_vmware.api [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Task: {'id': task-2962453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146211} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.494899] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 626.495117] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 626.495320] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 626.495507] env[68279]: INFO nova.compute.manager [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 626.495831] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 626.495987] env[68279]: DEBUG nova.compute.manager [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 626.496110] env[68279]: DEBUG nova.network.neutron [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.530313] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Releasing lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.530643] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received event network-vif-plugged-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 626.530863] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Acquiring lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.531296] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.531402] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.531500] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] No waiting events found dispatching network-vif-plugged-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 626.531735] env[68279]: WARNING nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received unexpected event network-vif-plugged-e402e4c3-f9cc-469b-a10e-86b1f89eddad for instance with vm_state building and task_state spawning. 
[ 626.531932] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 626.532183] env[68279]: DEBUG nova.compute.manager [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing instance network info cache due to event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 626.532378] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Acquiring lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.532549] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Acquired lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.532735] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.724058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "41d87520-2f40-4313-a14f-84688e979ac2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.724433] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "41d87520-2f40-4313-a14f-84688e979ac2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.754180] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.755064] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 626.759521] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.156s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.764071] env[68279]: INFO nova.compute.claims [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.918217] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.936849] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962455, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.946580] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524236bf-78e0-31d6-6c11-8750be0347f8, 'name': SearchDatastore_Task, 'duration_secs': 0.015375} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.947831] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c59cfcb1-7b48-4ce0-9b8e-947e555fa551 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.953362] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 626.953362] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f3a2f1-590c-536e-1f58-f741c073c9ff" [ 626.953362] env[68279]: _type = "Task" [ 626.953362] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.962011] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f3a2f1-590c-536e-1f58-f741c073c9ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.090466] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.271275] env[68279]: DEBUG nova.compute.utils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 627.276046] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 627.276286] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 627.283634] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Successfully created port: 5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.351814] env[68279]: DEBUG nova.compute.manager [req-d9c8a20e-5506-4a4e-8922-4da472bd515c req-70ab092f-3a19-4196-b43a-46d7c2d185f4 service nova] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Received event network-vif-deleted-3824fbbc-b1fe-488c-a0d7-75f277ff669a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.423117] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962454, 'name': ReconfigVM_Task, 'duration_secs': 0.63129} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.423675] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 239d0522-5101-49e0-8d3b-85b54927cd21/239d0522-5101-49e0-8d3b-85b54927cd21.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.424275] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0a8cef2-7041-48eb-94f5-36f2384bffe8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.432143] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 627.432143] env[68279]: value = "task-2962456" [ 627.432143] env[68279]: _type = "Task" [ 627.432143] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.439260] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554005} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.439815] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.440399] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.440653] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df3e0d64-fefa-4904-b767-ffc50cf66412 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.447926] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962456, 'name': Rename_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.452598] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 627.452598] env[68279]: value = "task-2962457" [ 627.452598] env[68279]: _type = "Task" [ 627.452598] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.467810] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962457, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.471633] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f3a2f1-590c-536e-1f58-f741c073c9ff, 'name': SearchDatastore_Task, 'duration_secs': 0.013052} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.471900] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.473628] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6b778e98-12c2-42a5-a772-06ea32d090b8/6b778e98-12c2-42a5-a772-06ea32d090b8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.473957] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.474705] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.474705] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-992f77ed-5c17-482a-8587-3632595a14a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.476517] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-beeb4979-b38e-4d50-bb11-2ad04e2ca29c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.483271] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 627.483271] env[68279]: value = "task-2962458" [ 627.483271] env[68279]: _type = "Task" [ 627.483271] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.488564] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.488761] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.490355] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-754057da-ee1e-410d-8e96-f3fbde6be13f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.499484] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.503332] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 627.503332] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d9d899-30d0-6ce4-2252-b6488c0933d9" [ 627.503332] env[68279]: _type = "Task" [ 627.503332] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.513556] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d9d899-30d0-6ce4-2252-b6488c0933d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.558111] env[68279]: DEBUG nova.policy [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22a4cadc191d4e9fa023eff168c8ddf9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e382d6dee334cd2bcf097cbe56f1143', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 627.738841] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.739101] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.781370] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 627.949211] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962456, 'name': Rename_Task, 'duration_secs': 0.142236} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.949211] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.949497] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ad8261d-57a3-4c65-86fe-13bf755e00d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.959798] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 627.959798] env[68279]: value = "task-2962459" [ 627.959798] env[68279]: _type = "Task" [ 627.959798] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.966392] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962457, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070088} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.967109] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 627.968162] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988b6c6c-acb3-4834-b68d-46d8b05bfabf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.973174] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962459, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.999989] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.006291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e7d7dba-09cd-4109-b304-f5970c294f64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.028634] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.034199] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 628.034199] env[68279]: value = "task-2962460" [ 628.034199] env[68279]: _type = "Task" [ 628.034199] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.034461] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d9d899-30d0-6ce4-2252-b6488c0933d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010285} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.041037] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff6fc4c3-0895-41f6-9cb6-1f370bc1db64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.047982] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.050259] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 628.050259] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526b90eb-01be-bf74-57b2-96ebb26513b2" [ 628.050259] env[68279]: _type = "Task" [ 628.050259] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.062067] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526b90eb-01be-bf74-57b2-96ebb26513b2, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.062329] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.062591] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb/1d0f383f-6bf9-42d0-b6c6-1f276eb181cb.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.062868] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a27cb888-8c5c-4117-a87a-b761b9b24477 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.069353] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 628.069353] env[68279]: value = "task-2962461" [ 628.069353] env[68279]: _type = "Task" [ 628.069353] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.077372] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962461, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.115266] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066e4e0c-bbfd-4180-9715-e05250eae9cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.123996] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5356067-fccf-4f04-aa04-76d4a3fbb8d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.156129] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79783606-e101-48e8-a741-0c9eaae30c60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.166639] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9174fd0-3df5-484a-8a4a-f57ec116fc2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.179742] env[68279]: DEBUG nova.compute.provider_tree [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.192894] env[68279]: DEBUG nova.network.neutron [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Updating instance_info_cache with network_info: [{"id": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "address": "fa:16:3e:f0:9f:ae", "network": {"id": "fff8bd5f-b587-4a76-aae0-e8c05562135b", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-218638832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fa34d494002422bb2a3bc28b641cdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap263143b2-0c", "ovs_interfaceid": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.258369] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updated VIF entry in instance network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.258718] env[68279]: DEBUG nova.network.neutron [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [{"id": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "address": "fa:16:3e:05:7a:8e", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape402e4c3-f9", "ovs_interfaceid": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.470540] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962459, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.500951] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.541604] env[68279]: DEBUG nova.network.neutron [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.556755] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962460, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.561241] env[68279]: DEBUG nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Received event network-vif-deleted-6169f442-4572-4d81-9091-252e8a2afb74 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.561484] env[68279]: DEBUG nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Received event network-vif-plugged-263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.561715] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Acquiring lock "b869231a-5293-433f-ac7c-d50030368826-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.561949] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Lock "b869231a-5293-433f-ac7c-d50030368826-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.562499] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Lock "b869231a-5293-433f-ac7c-d50030368826-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.562499] env[68279]: DEBUG nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] No waiting events found dispatching network-vif-plugged-263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.562499] env[68279]: WARNING nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Received unexpected event network-vif-plugged-263143b2-0c62-4c8e-94f0-4801bce02f1b for instance with vm_state building and task_state spawning. [ 628.562669] env[68279]: DEBUG nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Received event network-changed-263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.562831] env[68279]: DEBUG nova.compute.manager [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Refreshing instance network info cache due to event network-changed-263143b2-0c62-4c8e-94f0-4801bce02f1b. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.563274] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Acquiring lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.581756] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.682841] env[68279]: DEBUG nova.scheduler.client.report [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 628.695991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Releasing lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.695991] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Instance network_info: |[{"id": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "address": "fa:16:3e:f0:9f:ae", "network": {"id": "fff8bd5f-b587-4a76-aae0-e8c05562135b", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-218638832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fa34d494002422bb2a3bc28b641cdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap263143b2-0c", "ovs_interfaceid": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 628.697648] env[68279]: DEBUG oslo_concurrency.lockutils 
[req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Acquired lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.697648] env[68279]: DEBUG nova.network.neutron [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Refreshing network info cache for port 263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.698159] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:9f:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '263143b2-0c62-4c8e-94f0-4801bce02f1b', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.707805] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Creating folder: Project (4fa34d494002422bb2a3bc28b641cdfd). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.710090] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbe1b041-c68d-444c-9cee-2de698ee5383 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.721236] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Created folder: Project (4fa34d494002422bb2a3bc28b641cdfd) in parent group-v594445. [ 628.721439] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Creating folder: Instances. Parent ref: group-v594468. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.721686] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e221250-f44f-4b1c-918e-c9851a741af4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.735350] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Created folder: Instances in parent group-v594468. [ 628.735350] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.735350] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b869231a-5293-433f-ac7c-d50030368826] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 628.735350] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35d3a69b-6161-4324-bbd9-defe0920ae48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.752260] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.752260] env[68279]: value = "task-2962464" [ 628.752260] env[68279]: _type = "Task" [ 628.752260] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.760475] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962464, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.763760] env[68279]: DEBUG oslo_concurrency.lockutils [req-e875d40a-320e-4595-9348-4967cf7451d0 req-cb08b943-405e-46a7-903f-7b4ae59e4b7e service nova] Releasing lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.797439] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 628.834718] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:43:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='518525242',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-216844737',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 628.835234] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.835479] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 628.835745] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.835906] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 628.836148] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 628.836514] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 628.837180] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 
tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 628.837446] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 628.837823] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 628.838219] env[68279]: DEBUG nova.virt.hardware [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 628.839862] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ab147e-9a0c-4608-acd3-442fdf2d49d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.851069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a29ff51-9260-44a2-829a-899e71958b47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.976028] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962459, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.006140] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962458, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.050851] env[68279]: INFO nova.compute.manager [-] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Took 2.55 seconds to deallocate network for instance. [ 629.051965] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Successfully updated port: 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.063850] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962460, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.065345] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.065567] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquired lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.067253] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.084749] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962461, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.189056] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.190211] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 629.194026] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.478s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.200959] env[68279]: INFO nova.compute.claims [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.267020] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962464, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.316215] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Successfully created port: bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.478599] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962459, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.502830] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962458, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.731878} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.503446] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6b778e98-12c2-42a5-a772-06ea32d090b8/6b778e98-12c2-42a5-a772-06ea32d090b8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.503718] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.504015] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61b108fe-e21a-4430-ba7b-2646e7eb6955 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.511460] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 629.511460] env[68279]: value = "task-2962465" [ 629.511460] env[68279]: _type = "Task" [ 629.511460] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.522420] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962465, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.556479] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962460, 'name': ReconfigVM_Task, 'duration_secs': 1.326856} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.556682] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.557427] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c15a377e-d1f2-4125-a5ed-00f40d3f99c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.564805] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 629.564805] env[68279]: value = "task-2962466" [ 629.564805] env[68279]: _type = "Task" [ 629.564805] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.566464] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.581429] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962466, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.586814] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962461, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.136628} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.587573] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb/1d0f383f-6bf9-42d0-b6c6-1f276eb181cb.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.587573] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.588406] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9701f7a2-5c4d-46c5-9dba-76e5554f84bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.594400] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 629.594400] env[68279]: value = "task-2962467" [ 629.594400] env[68279]: _type = "Task" [ 629.594400] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.608368] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962467, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.704915] env[68279]: DEBUG nova.compute.utils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 629.710940] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 629.712232] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 629.764507] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962464, 'name': CreateVM_Task, 'duration_secs': 0.596043} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.764686] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b869231a-5293-433f-ac7c-d50030368826] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 629.765357] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.765589] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.765920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 629.766188] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb4d1f82-b844-4cd9-a3e2-f6affda1620e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.771853] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 629.771853] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca0996-614b-19dc-4562-b1caf7316170" [ 629.771853] env[68279]: _type = "Task" [ 629.771853] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.781781] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca0996-614b-19dc-4562-b1caf7316170, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.896514] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.982549] env[68279]: DEBUG oslo_vmware.api [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962459, 'name': PowerOnVM_Task, 'duration_secs': 1.713597} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.983141] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.983346] env[68279]: INFO nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Took 10.70 seconds to spawn the instance on the hypervisor. [ 629.983522] env[68279]: DEBUG nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.984589] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2909019e-a489-435f-ab8a-31af4c4e19d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.021785] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061213} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.022338] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.023220] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6c370c-53de-45c7-861e-80fd9956e153 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.051298] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 6b778e98-12c2-42a5-a772-06ea32d090b8/6b778e98-12c2-42a5-a772-06ea32d090b8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.053307] env[68279]: DEBUG nova.policy [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80a2118a1dfa4adebf6cf8d7ac00228d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce23f4f8da064cdeb40d4c6733be5338', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': 
None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.055083] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b8ba93-e8ff-4233-b1f2-9b3f8d4c8c5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.084130] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962466, 'name': Rename_Task, 'duration_secs': 0.328184} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.088695] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.089040] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 630.089040] env[68279]: value = "task-2962468" [ 630.089040] env[68279]: _type = "Task" [ 630.089040] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.089394] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f452e30d-55e6-424a-af28-21858aadb969 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.102402] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 630.102402] env[68279]: value = "task-2962469" [ 630.102402] env[68279]: _type = "Task" [ 630.102402] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.102667] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962468, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.108955] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065773} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.109527] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.110539] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee34ec3b-52ca-4ac7-b390-d7a04847462f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.115665] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962469, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.136298] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb/1d0f383f-6bf9-42d0-b6c6-1f276eb181cb.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.136610] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07ddf63b-42c0-4135-8774-3230b815c715 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.156396] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 630.156396] env[68279]: value = "task-2962470" [ 630.156396] env[68279]: _type = "Task" [ 630.156396] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.164526] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.218713] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 630.284768] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca0996-614b-19dc-4562-b1caf7316170, 'name': SearchDatastore_Task, 'duration_secs': 0.022572} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.285730] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.287776] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.287776] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.287776] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.287776] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.287776] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7db9b943-d80a-436e-9b62-7319d59bdf5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.301763] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 630.301939] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 630.302699] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a91e293-cfd0-4f54-976e-2915766f3b37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.312285] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 630.312285] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eadead-bfe5-e86b-b3c5-687eef98488e" [ 630.312285] env[68279]: _type = "Task" [ 630.312285] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.325235] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eadead-bfe5-e86b-b3c5-687eef98488e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.380148] env[68279]: DEBUG nova.network.neutron [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Updated VIF entry in instance network info cache for port 263143b2-0c62-4c8e-94f0-4801bce02f1b. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.380416] env[68279]: DEBUG nova.network.neutron [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Updating instance_info_cache with network_info: [{"id": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "address": "fa:16:3e:f0:9f:ae", "network": {"id": "fff8bd5f-b587-4a76-aae0-e8c05562135b", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-218638832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fa34d494002422bb2a3bc28b641cdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap263143b2-0c", "ovs_interfaceid": "263143b2-0c62-4c8e-94f0-4801bce02f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.508263] env[68279]: INFO nova.compute.manager [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Took 15.57 seconds to 
build instance. [ 630.566998] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e233f479-de5a-4e81-8781-2b372ef956a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.575614] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cfc123-395f-4508-bf9f-0d2fa325ef0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.616747] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f89141-066d-4a9f-8554-5e0569b132e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.624856] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962468, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.630116] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962469, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.631417] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eee3c95-f63d-4ced-878f-a5d0e77f1e1a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.636968] env[68279]: DEBUG nova.network.neutron [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating instance_info_cache with network_info: [{"id": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "address": "fa:16:3e:ad:43:12", "network": {"id": "d2cab8c6-aa6c-4741-8ffe-4e7a4f40a698", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2022110668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0703802e5fb4caf939a345174f379be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949c2fdc-85", "ovs_interfaceid": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.648826] env[68279]: DEBUG nova.compute.provider_tree [None req-18727451-179d-4902-9039-6487ee9b1612 
tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.667500] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962470, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.830797] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eadead-bfe5-e86b-b3c5-687eef98488e, 'name': SearchDatastore_Task, 'duration_secs': 0.030113} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.832465] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01c49088-0f39-45ec-a7a8-2a53962d6b50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.840779] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 630.840779] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521e7351-9ff4-c0b8-97ee-db919976a031" [ 630.840779] env[68279]: _type = "Task" [ 630.840779] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.853804] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521e7351-9ff4-c0b8-97ee-db919976a031, 'name': SearchDatastore_Task, 'duration_secs': 0.009562} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.854109] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.854593] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b869231a-5293-433f-ac7c-d50030368826/b869231a-5293-433f-ac7c-d50030368826.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.854689] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b898a922-0a1a-4b4c-8c28-e149db18c982 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.861720] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 630.861720] env[68279]: value = "task-2962471" [ 630.861720] env[68279]: _type = "Task" [ 630.861720] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.870519] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962471, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.883126] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdbf53bb-1c93-4f2d-b365-60e6ff544837 req-03a73fc7-44c5-47da-a0eb-bf7c782aacf4 service nova] Releasing lock "refresh_cache-b869231a-5293-433f-ac7c-d50030368826" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.011950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a7935365-e4ac-4351-9587-5344bfd8f651 tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.087s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.072170] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "336b7399-b64e-411f-99bc-ba0d292e371a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.072506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.126895] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962469, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.129895] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962468, 'name': ReconfigVM_Task, 'duration_secs': 0.804611} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.130673] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 6b778e98-12c2-42a5-a772-06ea32d090b8/6b778e98-12c2-42a5-a772-06ea32d090b8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.131301] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abacaa65-bf41-475b-b7c7-5cd9af2baf32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.138275] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 631.138275] env[68279]: value = "task-2962472" [ 631.138275] env[68279]: _type = "Task" [ 631.138275] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.138762] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Releasing lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.138941] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Instance network_info: |[{"id": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "address": "fa:16:3e:ad:43:12", "network": {"id": "d2cab8c6-aa6c-4741-8ffe-4e7a4f40a698", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2022110668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0703802e5fb4caf939a345174f379be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949c2fdc-85", "ovs_interfaceid": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.139901] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 
6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:43:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.147848] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Creating folder: Project (a0703802e5fb4caf939a345174f379be). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.151653] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0c17f1a-1a3e-4fdc-93f8-e4c6bc7443a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.154334] env[68279]: DEBUG nova.scheduler.client.report [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 631.167870] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962472, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.169401] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Created folder: Project (a0703802e5fb4caf939a345174f379be) in parent group-v594445. [ 631.169401] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Creating folder: Instances. Parent ref: group-v594471. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.169401] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37e008d2-1751-4a78-80b0-965496c1b984 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.173919] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962470, 'name': ReconfigVM_Task, 'duration_secs': 0.615609} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.174431] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb/1d0f383f-6bf9-42d0-b6c6-1f276eb181cb.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.175364] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c598b8d-ca26-46c1-a7fa-c37b19879e49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.181782] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 631.181782] env[68279]: value = "task-2962475" [ 631.181782] env[68279]: _type = "Task" [ 631.181782] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.182742] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Created folder: Instances in parent group-v594471. [ 631.182742] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.190041] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.190041] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8b275ac-04aa-4a57-9bc5-96e2ffd0a89b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.211397] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962475, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.212634] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.212634] env[68279]: value = "task-2962476" [ 631.212634] env[68279]: _type = "Task" [ 631.212634] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.220996] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.242355] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 631.273590] env[68279]: DEBUG nova.compute.manager [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Received event network-vif-plugged-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.273826] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.274049] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.274678] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.274678] env[68279]: DEBUG nova.compute.manager [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] No waiting events found dispatching network-vif-plugged-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 631.274678] env[68279]: WARNING nova.compute.manager [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Received unexpected event network-vif-plugged-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d for instance with vm_state building and task_state spawning. [ 631.274827] env[68279]: DEBUG nova.compute.manager [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Received event network-changed-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.274893] env[68279]: DEBUG nova.compute.manager [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Refreshing instance network info cache due to event network-changed-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 631.275222] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Acquiring lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.275350] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Acquired lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.275504] env[68279]: DEBUG nova.network.neutron [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Refreshing network info cache for port 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.292777] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 631.293138] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.293373] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 631.293560] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.294068] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 631.294262] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 
tempest-ImagesNegativeTestJSON-1590220102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 631.294528] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 631.294887] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 631.295446] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 631.295446] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 631.295636] env[68279]: DEBUG nova.virt.hardware [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 631.297293] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca61e56-ddbe-43c4-ab8b-556c9b4601a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.308260] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c149b0d5-2a06-4f68-80ab-163db07c3d41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.372373] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962471, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.517181] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.626522] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962469, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.633439] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Successfully updated port: 5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 631.653195] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962472, 'name': Rename_Task, 'duration_secs': 0.405588} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.653327] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.653612] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76c72954-ba7a-4f31-a51a-2a262921ea36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.661583] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 631.661583] env[68279]: value = "task-2962477" [ 631.661583] env[68279]: _type = "Task" [ 631.661583] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.666462] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.667504] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.671022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.936s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.671274] env[68279]: DEBUG nova.objects.instance [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lazy-loading 'resources' on Instance uuid 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 631.678369] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.680838] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Successfully created port: 1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.694184] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962475, 'name': Rename_Task, 'duration_secs': 0.316873} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.694620] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.695327] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2c1ea0f-33eb-4557-bd73-6f095ac55cd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.702536] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 631.702536] env[68279]: value = "task-2962478" [ 631.702536] env[68279]: _type = "Task" [ 631.702536] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.712121] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962478, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.723296] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962476, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.876400] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649459} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.876693] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b869231a-5293-433f-ac7c-d50030368826/b869231a-5293-433f-ac7c-d50030368826.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.877262] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.877262] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-760edd22-de66-4841-b508-97e1a746ec98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.884095] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 631.884095] env[68279]: value = "task-2962479" [ 631.884095] env[68279]: _type = "Task" [ 631.884095] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.893800] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962479, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.045865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.121889] env[68279]: DEBUG oslo_vmware.api [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962469, 'name': PowerOnVM_Task, 'duration_secs': 1.776008} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.123178] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.123424] env[68279]: INFO nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Took 19.36 seconds to spawn the instance on the hypervisor. [ 632.123604] env[68279]: DEBUG nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.124594] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc37eb43-a423-43d7-a351-4ad10f4e706c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.137639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.137932] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquired lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.138300] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.177559] env[68279]: DEBUG nova.compute.utils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 632.184746] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 632.184746] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 632.199225] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962477, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.213793] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962478, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.226544] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962476, 'name': CreateVM_Task, 'duration_secs': 0.519458} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.226544] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.226544] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.226544] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.226952] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.229638] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df0e5ad2-eb8b-48f6-8eda-b23b0291700b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.233117] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 632.233117] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529d4963-bbbd-73a6-2a20-0c2456a590db" [ 632.233117] env[68279]: _type = "Task" [ 
632.233117] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.246631] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529d4963-bbbd-73a6-2a20-0c2456a590db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.268521] env[68279]: DEBUG nova.policy [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c390fb2ba95249028d9cb30962259b12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4632448f387a49eda08bcdc55b94a84c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 632.400390] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962479, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107905} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.403235] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.404896] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c45d817-c49c-43e4-a556-c55f915d0586 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.431751] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] b869231a-5293-433f-ac7c-d50030368826/b869231a-5293-433f-ac7c-d50030368826.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.435360] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-393af6bd-f2b8-4e1a-ace7-9bdab6621f7f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.461018] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 632.461018] env[68279]: value = "task-2962480" [ 632.461018] env[68279]: _type = "Task" [ 632.461018] 
env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.475839] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962480, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.603180] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84fad7-c6e5-4abf-99f3-3fde1daedd6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.609995] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26bb07d-7b04-4a9a-b109-e6ebd96882c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.656256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101ec26d-c210-4799-ad1c-e657f0533622 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.664376] env[68279]: INFO nova.compute.manager [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Took 29.22 seconds to build instance. [ 632.676851] env[68279]: DEBUG nova.network.neutron [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updated VIF entry in instance network info cache for port 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.677361] env[68279]: DEBUG nova.network.neutron [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating instance_info_cache with network_info: [{"id": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "address": "fa:16:3e:ad:43:12", "network": {"id": "d2cab8c6-aa6c-4741-8ffe-4e7a4f40a698", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2022110668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0703802e5fb4caf939a345174f379be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949c2fdc-85", "ovs_interfaceid": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.679869] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08799d77-6a1e-4f8b-a771-46c061863ef8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.689503] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 632.692090] env[68279]: DEBUG oslo_vmware.api [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962477, 'name': PowerOnVM_Task, 'duration_secs': 0.800345} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.694314] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.694507] env[68279]: INFO nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Took 17.68 seconds to spawn the instance on the hypervisor. 
[ 632.694700] env[68279]: DEBUG nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.695917] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9f3213-7385-4557-8e64-f87fe0e2f58c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.709120] env[68279]: DEBUG nova.compute.provider_tree [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.726150] env[68279]: DEBUG oslo_vmware.api [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962478, 'name': PowerOnVM_Task, 'duration_secs': 0.513997} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.726436] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.726620] env[68279]: INFO nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Took 22.27 seconds to spawn the instance on the hypervisor. [ 632.726788] env[68279]: DEBUG nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.727602] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eeac4f-0f7f-4b06-9813-c791a52d5daa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.748209] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529d4963-bbbd-73a6-2a20-0c2456a590db, 'name': SearchDatastore_Task, 'duration_secs': 0.046707} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.748572] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.749236] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.749236] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.749236] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.749516] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.749626] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-453bfc66-d769-4e47-985d-6a7f6bfcb00b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.761230] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.761422] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.762258] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcae09fc-4ef9-4edf-942e-edd7aff4fe2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.771839] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 632.771839] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52faef46-f646-90b9-ee52-ea5d060abbeb" [ 632.771839] env[68279]: _type = "Task" [ 632.771839] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.777201] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52faef46-f646-90b9-ee52-ea5d060abbeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.808333] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.974758] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962480, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.171276] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3a7a4629-caf0-4922-9373-0ea12c98265f tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.732s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.190036] env[68279]: DEBUG oslo_concurrency.lockutils [req-05204664-7e14-452f-bb24-09431f2bf8d5 req-76b0c4a4-e80c-4749-a8bf-076c3a5d2c76 service nova] Releasing lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.219344] env[68279]: DEBUG nova.scheduler.client.report [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 633.237058] env[68279]: INFO nova.compute.manager [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Took 24.99 seconds to build instance. [ 633.258658] env[68279]: INFO nova.compute.manager [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Took 30.33 seconds to build instance. [ 633.288209] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52faef46-f646-90b9-ee52-ea5d060abbeb, 'name': SearchDatastore_Task, 'duration_secs': 0.021756} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.289382] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7c1bccd-afd9-4293-9375-7e36845e5738 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.298558] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 633.298558] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf04c4-00e2-4a41-81d1-580aab764a84" [ 633.298558] env[68279]: _type = "Task" [ 633.298558] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.309844] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf04c4-00e2-4a41-81d1-580aab764a84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.389930] env[68279]: DEBUG nova.compute.manager [req-3f64bd4b-8faf-4671-bae1-c2fec5b279d5 req-c1cbbf33-83a1-41ce-ac69-06ce7016e57d service nova] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Received event network-vif-deleted-485b302b-3131-449a-ae46-edcfc4e6a588 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 633.473169] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962480, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.538419] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Successfully updated port: bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.613617] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.614045] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.675513] env[68279]: DEBUG nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.703051] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 633.730702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.059s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.735339] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.775s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.735718] env[68279]: DEBUG nova.objects.instance [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lazy-loading 'resources' on Instance uuid e65722bb-e39a-47e5-9aaf-87cfd27930d1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 633.740260] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e95b8198-d53f-4d13-ab7b-3893b5d5a5e1 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.498s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.747223] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.747359] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.747402] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.747568] env[68279]: DEBUG nova.virt.hardware [None 
req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.747711] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.748435] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.748435] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.748435] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.748587] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.748657] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.748835] env[68279]: DEBUG nova.virt.hardware [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.750296] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f3f0ea-ee42-497f-9add-05ff0f2413b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.759484] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21293e7-4975-4407-b650-30ea4009285e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.766153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-232a759f-98eb-4c18-9441-a50a3386e783 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 31.846s 
{{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.781362] env[68279]: INFO nova.scheduler.client.report [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Deleted allocations for instance 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7 [ 633.810866] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf04c4-00e2-4a41-81d1-580aab764a84, 'name': SearchDatastore_Task, 'duration_secs': 0.037017} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.811157] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.811433] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7/6ca13774-f4db-4c9c-9da7-b773ce6cc6e7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 633.811715] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec1ece4c-02de-492a-a1d7-7f49d34d68b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.818697] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 633.818697] env[68279]: value = "task-2962481" [ 633.818697] env[68279]: _type = "Task" [ 633.818697] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.829401] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962481, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.929901] env[68279]: DEBUG nova.network.neutron [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Updating instance_info_cache with network_info: [{"id": "5340623b-93eb-400e-be25-8fc583cdfe46", "address": "fa:16:3e:3f:79:51", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5340623b-93", "ovs_interfaceid": "5340623b-93eb-400e-be25-8fc583cdfe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.973195] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962480, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.045180] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.045284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.045492] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.212394] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.244400] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.270832] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.293767] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d7e2d62-48bc-4e94-8e8f-319236881ec8 tempest-DeleteServersAdminTestJSON-1479790736 tempest-DeleteServersAdminTestJSON-1479790736-project-admin] Lock "8782d86d-0e94-44b4-9595-b0eb2b2a3fb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.921s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.331278] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully created port: 0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.338954] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962481, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.376367] env[68279]: DEBUG nova.compute.manager [None req-7f54d7f5-8ea4-4297-87a5-82bc11fc613b tempest-ServerDiagnosticsV248Test-1426662560 tempest-ServerDiagnosticsV248Test-1426662560-project-admin] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.378234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e57e497-78a1-49fa-930e-a7e1eab4f1ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.388184] env[68279]: INFO nova.compute.manager [None req-7f54d7f5-8ea4-4297-87a5-82bc11fc613b tempest-ServerDiagnosticsV248Test-1426662560 tempest-ServerDiagnosticsV248Test-1426662560-project-admin] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Retrieving diagnostics [ 634.389057] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26177e9b-726d-4483-9ddb-0efe3c65d313 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.432351] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Releasing lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.432667] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Instance network_info: |[{"id": "5340623b-93eb-400e-be25-8fc583cdfe46", "address": "fa:16:3e:3f:79:51", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5340623b-93", "ovs_interfaceid": "5340623b-93eb-400e-be25-8fc583cdfe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 634.433092] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:79:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5340623b-93eb-400e-be25-8fc583cdfe46', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.441656] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Creating folder: Project (0c1f31d38eb648e1ad8f5355e5a90a06). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.444789] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f554787-871f-416d-bec3-c0085896e73d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.456076] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Created folder: Project (0c1f31d38eb648e1ad8f5355e5a90a06) in parent group-v594445. [ 634.456285] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Creating folder: Instances. Parent ref: group-v594474. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.456533] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53795512-7e0f-419f-9e47-718f94bb9ef0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.470904] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Created folder: Instances in parent group-v594474. 
[ 634.471257] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.471436] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 634.472023] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3749054c-bac5-48ac-90e5-70fd2070487e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.491126] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962480, 'name': ReconfigVM_Task, 'duration_secs': 1.591818} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.495143] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Reconfigured VM instance instance-00000009 to attach disk [datastore2] b869231a-5293-433f-ac7c-d50030368826/b869231a-5293-433f-ac7c-d50030368826.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 634.496088] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d65f0bb-6c2b-463c-945e-fe0ee0cf2fbf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.500148] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.500148] env[68279]: value = "task-2962484" [ 634.500148] env[68279]: _type = "Task" [ 634.500148] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.504800] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 634.504800] env[68279]: value = "task-2962485" [ 634.504800] env[68279]: _type = "Task" [ 634.504800] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.516277] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962484, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.522384] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962485, 'name': Rename_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.656439] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e71ca5-41c6-41b9-bde3-135b352417b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.665890] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a78e93-8279-43b3-a297-e9aac0da0c2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.704231] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.705995] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2054283a-7959-4824-8a6b-618aa398017a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.714837] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf2adfe-5ef0-40cb-bb70-0e0603b091a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.731632] env[68279]: DEBUG nova.compute.provider_tree [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.783852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.801491] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.832702] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.857246} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.833614] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7/6ca13774-f4db-4c9c-9da7-b773ce6cc6e7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 634.833614] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 634.833614] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8061b8b0-91b2-40cf-8c4b-10eb2b3cedc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.840181] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 634.840181] env[68279]: value = "task-2962486" [ 634.840181] env[68279]: _type = "Task" [ 634.840181] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.848859] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.014070] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962484, 'name': CreateVM_Task, 'duration_secs': 0.328619} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.014070] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 635.014511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.014757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.014987] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 635.020124] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f00fa896-1764-4953-981c-a91c1b02f096 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.020525] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962485, 'name': Rename_Task, 'duration_secs': 0.278347} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.020797] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 635.021387] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9882b079-09dd-478b-96b6-5ee8d11dcfbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.029268] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 635.029268] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52226394-447c-ca32-91a7-7907f8b1adc8" [ 635.029268] env[68279]: _type = "Task" [ 635.029268] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.037655] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 635.037655] env[68279]: value = "task-2962487" [ 635.037655] env[68279]: _type = "Task" [ 635.037655] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.044242] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52226394-447c-ca32-91a7-7907f8b1adc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.049361] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962487, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.237127] env[68279]: DEBUG nova.scheduler.client.report [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.351830] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109389} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.351830] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.352598] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b87ce02-7296-4f38-8edb-0cf9da301061 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.376170] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7/6ca13774-f4db-4c9c-9da7-b773ce6cc6e7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.376170] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d8523b6-a9f8-4e1a-b022-e43eddeb5737 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.396325] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 635.396325] env[68279]: value = "task-2962488" [ 635.396325] env[68279]: _type = "Task" [ 635.396325] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.404884] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962488, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.496334] env[68279]: DEBUG nova.network.neutron [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updating instance_info_cache with network_info: [{"id": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "address": "fa:16:3e:94:8e:b5", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd0e2597-ab", "ovs_interfaceid": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.540575] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52226394-447c-ca32-91a7-7907f8b1adc8, 'name': SearchDatastore_Task, 'duration_secs': 0.02715} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.547348] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.547843] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.547898] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.548033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.548650] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.548650] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83526fa4-dd62-4435-9110-86107f19ef4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.563246] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962487, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.563246] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.563246] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 635.564234] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d51bc011-d256-4a06-b796-af828b83ae42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.572761] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 635.572761] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e7459-0482-2b2a-a2fa-e9518822ffdc" [ 635.572761] env[68279]: _type = "Task" [ 635.572761] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.586022] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e7459-0482-2b2a-a2fa-e9518822ffdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.615923] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Successfully updated port: 1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 635.748865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.755232] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.185s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.755546] env[68279]: DEBUG nova.objects.instance [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lazy-loading 'resources' on Instance uuid 3d3b5611-714f-4757-b848-891319c2fea3 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 635.782399] env[68279]: INFO nova.scheduler.client.report [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Deleted allocations for instance e65722bb-e39a-47e5-9aaf-87cfd27930d1 [ 635.910252] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962488, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.001643] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.002122] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Instance network_info: |[{"id": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "address": "fa:16:3e:94:8e:b5", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd0e2597-ab", "ovs_interfaceid": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 636.003046] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:8e:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3fca0ab6-cc80-429f-9117-885f170135b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd0e2597-abb7-4689-856c-4ad289b6c70d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.015821] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Creating folder: Project (8e382d6dee334cd2bcf097cbe56f1143). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.018223] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41180ec0-6b66-4523-888b-57ed13e35c66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.033264] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Created folder: Project (8e382d6dee334cd2bcf097cbe56f1143) in parent group-v594445. [ 636.033264] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Creating folder: Instances. Parent ref: group-v594477. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.033520] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5eae10ba-666a-48fb-b312-3cac63508345 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.046834] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Created folder: Instances in parent group-v594477. [ 636.047089] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 636.047296] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.047880] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b18662d-c7d3-43f4-b83a-5b1b51d5b718 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.065880] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962487, 'name': PowerOnVM_Task} progress is 91%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.071219] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.071219] env[68279]: value = "task-2962491" [ 636.071219] env[68279]: _type = "Task" [ 636.071219] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.094523] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e7459-0482-2b2a-a2fa-e9518822ffdc, 'name': SearchDatastore_Task, 'duration_secs': 0.01263} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.094970] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962491, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.095639] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a76b8251-f151-4dbc-be98-f2025b02f371 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.104498] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 636.104498] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52085078-8ab3-9792-81dd-3c89ffa4e75f" [ 636.104498] env[68279]: _type = "Task" [ 636.104498] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.115459] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52085078-8ab3-9792-81dd-3c89ffa4e75f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.118655] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.118827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquired lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.119100] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.179347] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully created port: 98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.295490] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce9d3b0-3024-4731-b7a2-8504783ba093 tempest-ServerDiagnosticsNegativeTest-2020419269 tempest-ServerDiagnosticsNegativeTest-2020419269-project-member] Lock "e65722bb-e39a-47e5-9aaf-87cfd27930d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.950s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.407967] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962488, 'name': ReconfigVM_Task, 'duration_secs': 0.796709} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.407967] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7/6ca13774-f4db-4c9c-9da7-b773ce6cc6e7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.408259] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c5f18c0-01fc-4210-9524-d608418a0205 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.419217] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 636.419217] env[68279]: value = "task-2962492" [ 636.419217] env[68279]: _type = "Task" [ 636.419217] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.436183] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962492, 'name': Rename_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.570038] env[68279]: DEBUG oslo_vmware.api [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962487, 'name': PowerOnVM_Task, 'duration_secs': 1.161696} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.570038] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 636.570038] env[68279]: INFO nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Took 14.93 seconds to spawn the instance on the hypervisor. 
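The recurring "Task: {'id': task-NNNNNNN, 'name': PowerOnVM_Task} progress is N%" and "... completed successfully" lines come from oslo.vmware's task polling (_poll_task / wait_for_task), driven at the interval configured on the API session. A minimal sketch of that pattern follows; the vCenter host, credentials, and vm_ref are placeholders (assumptions), and this is not the Nova driver's actual code.

    # Minimal sketch, assuming placeholder vCenter credentials and a VM
    # managed object reference obtained elsewhere: start a vSphere task and
    # let oslo.vmware poll it, which produces the "progress is N%" /
    # "completed successfully" lines seen above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.test',               # vCenter host (placeholder)
        'administrator@vsphere.local',    # username (placeholder)
        'secret',                         # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)           # interval behind the progress lines

    vm_ref = ...  # managed object reference of the VM, obtained elsewhere

    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task"
    # in the log); wait_for_task() polls the task until it reaches a terminal
    # state and raises on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)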
[ 636.570366] env[68279]: DEBUG nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.574030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2e3b56-4358-46b0-8146-4c30b22b3776 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.593138] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962491, 'name': CreateVM_Task, 'duration_secs': 0.389535} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.606891] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 636.610997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.610997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.610997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 636.619376] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cc37544-ae95-4613-86a3-4abee01ef28e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.638032] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52085078-8ab3-9792-81dd-3c89ffa4e75f, 'name': SearchDatastore_Task, 'duration_secs': 0.011941} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.638032] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 636.638032] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf5dbc-6549-71d1-fdae-d2df986cabfe" [ 636.638032] env[68279]: _type = "Task" [ 636.638032] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.638032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.638032] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 4c99c929-9fda-42f0-9327-0508ad3e6150/4c99c929-9fda-42f0-9327-0508ad3e6150.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 636.640816] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7beab140-749e-416c-8ace-1a3bcea1f01d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.669028] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf5dbc-6549-71d1-fdae-d2df986cabfe, 'name': SearchDatastore_Task, 'duration_secs': 0.011401} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.669028] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 636.669028] env[68279]: value = "task-2962493" [ 636.669028] env[68279]: _type = "Task" [ 636.669028] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.669028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.669028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 636.669519] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.669519] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.669519] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.675440] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fec5a2fc-5b45-48b0-b930-5081534c909f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.694776] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.700426] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.702237] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 636.712124] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb4b0c1a-4e09-4353-8644-f45133637a70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.722961] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 636.722961] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528a214c-5d6d-386c-9196-ed3424d36408" [ 636.722961] env[68279]: _type = "Task" [ 636.722961] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.737526] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528a214c-5d6d-386c-9196-ed3424d36408, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.744376] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cf7642-0995-4fb5-93e6-f6c2a19b7b1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.759048] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0b414e-874f-4eb0-865c-5419fbd6ba2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.800109] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.802777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e44d267-655b-4e85-bd38-f3bbf3aa2d37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.814081] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c073da0b-c8f5-487c-99b8-f37c554d53fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.830442] env[68279]: DEBUG nova.compute.provider_tree [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.937890] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962492, 'name': Rename_Task, 'duration_secs': 0.175348} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.937890] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 636.937890] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2a335f9-9f40-495e-a4ea-4ac701b73912 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.942945] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 636.942945] env[68279]: value = "task-2962494" [ 636.942945] env[68279]: _type = "Task" [ 636.942945] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.954832] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.121162] env[68279]: INFO nova.compute.manager [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Took 20.53 seconds to build instance. [ 637.186753] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962493, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.232521] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528a214c-5d6d-386c-9196-ed3424d36408, 'name': SearchDatastore_Task, 'duration_secs': 0.011551} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.233547] env[68279]: DEBUG nova.network.neutron [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Updating instance_info_cache with network_info: [{"id": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "address": "fa:16:3e:6e:09:fa", "network": {"id": "14808924-402c-4591-90f3-2f5d974570d8", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-917992346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce23f4f8da064cdeb40d4c6733be5338", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e64662e-ba", "ovs_interfaceid": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.235301] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-528912d6-355f-41cb-b227-3fcddafb15c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.242460] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 637.242460] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b53ed0-6973-e92f-4e5d-9eee0d3df729" [ 637.242460] env[68279]: _type = "Task" [ 637.242460] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.254242] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b53ed0-6973-e92f-4e5d-9eee0d3df729, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.336768] env[68279]: DEBUG nova.scheduler.client.report [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.455086] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.491424] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully created port: 83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.624774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a40bd291-7b73-4dfd-ba90-700240748e7f tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.044s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.687444] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567963} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.689680] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 4c99c929-9fda-42f0-9327-0508ad3e6150/4c99c929-9fda-42f0-9327-0508ad3e6150.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 637.690212] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 637.691750] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-495a3d1f-9b2c-444a-b160-1f969c649a31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.700197] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 637.700197] env[68279]: value = "task-2962495" [ 637.700197] env[68279]: _type = "Task" [ 637.700197] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.710022] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.741023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Releasing lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.741023] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Instance network_info: |[{"id": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "address": "fa:16:3e:6e:09:fa", "network": {"id": "14808924-402c-4591-90f3-2f5d974570d8", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-917992346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce23f4f8da064cdeb40d4c6733be5338", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e64662e-ba", "ovs_interfaceid": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 637.741265] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:09:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e64662e-baf7-4c5a-9f9c-387637e18c28', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.750478] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Creating folder: Project (ce23f4f8da064cdeb40d4c6733be5338). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.750858] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08e54111-c95d-49d9-8d99-bf3fb42bad13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.763595] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b53ed0-6973-e92f-4e5d-9eee0d3df729, 'name': SearchDatastore_Task, 'duration_secs': 0.012555} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.765121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.765528] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7/65c3761e-c236-41a9-9adb-d1a6e7a9a7c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 637.765941] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Created folder: Project (ce23f4f8da064cdeb40d4c6733be5338) in parent group-v594445. [ 637.766332] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Creating folder: Instances. Parent ref: group-v594480. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.766668] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d0fd4e0-c869-4563-b623-7002cb32333e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.769551] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66418ceb-eac6-46f0-aa21-23e2a1a6824d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.775964] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 637.775964] env[68279]: value = "task-2962497" [ 637.775964] env[68279]: _type = "Task" [ 637.775964] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.784784] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962497, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.786294] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Created folder: Instances in parent group-v594480. [ 637.786543] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 637.786732] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.787187] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23c9bc34-ba8f-4b94-8434-e534201757d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.807299] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.807299] env[68279]: value = "task-2962499" [ 637.807299] env[68279]: _type = "Task" [ 637.807299] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.814723] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962499, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.845665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.094s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.848353] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.803s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.850078] env[68279]: INFO nova.compute.claims [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.875877] env[68279]: INFO nova.scheduler.client.report [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Deleted allocations for instance 3d3b5611-714f-4757-b848-891319c2fea3 [ 637.965214] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.133497] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.219127] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069985} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.219127] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 638.219847] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e408be7-314d-4410-a706-5d4ac530431f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.280847] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 4c99c929-9fda-42f0-9327-0508ad3e6150/4c99c929-9fda-42f0-9327-0508ad3e6150.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 638.280847] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63ba9a36-79ec-4490-9908-2ef33cfc61cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.314351] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 638.314351] env[68279]: value = "task-2962500" [ 638.314351] env[68279]: _type = "Task" [ 638.314351] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.318214] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962497, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.324578] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962499, 'name': CreateVM_Task, 'duration_secs': 0.34008} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.325158] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 638.325963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.326044] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.326455] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 638.330185] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59f53f5e-e819-470f-be63-048bb3b88bae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.332474] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.338960] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 638.338960] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e88cd9-6fcc-8bf2-42bc-2e659233368f" [ 638.338960] env[68279]: _type = "Task" [ 638.338960] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.350332] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e88cd9-6fcc-8bf2-42bc-2e659233368f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.389272] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e88af658-037d-4e65-8345-3a4c95d29331 tempest-TenantUsagesTestJSON-2039127984 tempest-TenantUsagesTestJSON-2039127984-project-member] Lock "3d3b5611-714f-4757-b848-891319c2fea3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.548s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.458780] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.662300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.749536] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Received event network-vif-plugged-5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.750588] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquiring lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.750830] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.751161] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.751298] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] No waiting events found dispatching network-vif-plugged-5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 638.751477] env[68279]: WARNING nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Received unexpected event 
network-vif-plugged-5340623b-93eb-400e-be25-8fc583cdfe46 for instance with vm_state building and task_state spawning. [ 638.751687] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Received event network-changed-5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.751864] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Refreshing instance network info cache due to event network-changed-5340623b-93eb-400e-be25-8fc583cdfe46. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 638.752060] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquiring lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.752195] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquired lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.752386] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Refreshing network info cache for port 5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 638.804117] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850274} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.804117] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7/65c3761e-c236-41a9-9adb-d1a6e7a9a7c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 638.804412] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 638.804579] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e0d796f-d477-49d1-b2af-1889c59fa16d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.811908] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 638.811908] env[68279]: value = "task-2962505" [ 638.811908] env[68279]: _type = "Task" [ 638.811908] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.840970] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.840970] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.850646] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e88cd9-6fcc-8bf2-42bc-2e659233368f, 'name': SearchDatastore_Task, 'duration_secs': 0.060613} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.852069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.852069] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.852069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.852069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.852439] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.852439] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f5cb367-67e6-4bac-a06e-167bc821d3fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.865763] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.865763] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.865763] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d2f8c7f-1ed8-458c-8daf-bb327df8f27f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.870512] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 638.870512] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52012e49-c056-4c2b-aee0-ba8a4d7892e8" [ 638.870512] env[68279]: _type = "Task" [ 638.870512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.880755] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52012e49-c056-4c2b-aee0-ba8a4d7892e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.970705] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 81%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.096848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "deea2dea-1860-45a0-9637-ced09bb51b81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.097078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.272041] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4e1c4a-4f40-4405-a3ca-518668d6b032 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.282140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd5344a-e881-4103-889e-83eb2448e0cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.328174] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7512e1c1-7574-4d29-9d7e-8fa9e77f46d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.346992] env[68279]: DEBUG oslo_vmware.api [None 
req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.403551} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.347266] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962500, 'name': ReconfigVM_Task, 'duration_secs': 0.731459} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.349808] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb35f19c-c3b6-4388-9d5d-be56f057989f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.358240] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.358621] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 4c99c929-9fda-42f0-9327-0508ad3e6150/4c99c929-9fda-42f0-9327-0508ad3e6150.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.359873] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98555522-11fa-4ffc-8ef5-fc8bab31d614 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.363740] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40063fc3-1c56-40f8-ae0a-3a09fb908d46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.379641] env[68279]: DEBUG nova.compute.provider_tree [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.404419] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7/65c3761e-c236-41a9-9adb-d1a6e7a9a7c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.404789] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 
tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 639.404789] env[68279]: value = "task-2962506" [ 639.404789] env[68279]: _type = "Task" [ 639.404789] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.408308] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d820e42-ea0b-461a-9af1-f49206fc8cea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.438013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.438275] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.439027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.439027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.439027] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52012e49-c056-4c2b-aee0-ba8a4d7892e8, 'name': SearchDatastore_Task, 'duration_secs': 0.017205} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.440254] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9185035f-c78f-4e6b-9c42-6b28b29bc301 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.447727] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 639.447727] env[68279]: value = "task-2962507" [ 639.447727] env[68279]: _type = "Task" [ 639.447727] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.450507] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962506, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.466024] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 639.466024] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529bd6c1-6c63-997d-98ac-0ac9f41ccbe1" [ 639.466024] env[68279]: _type = "Task" [ 639.466024] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.479853] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.480463] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.487463] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529bd6c1-6c63-997d-98ac-0ac9f41ccbe1, 'name': SearchDatastore_Task, 'duration_secs': 0.018603} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.487860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.488036] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c9bda338-6c7d-4850-8f46-7cd916372ac9/c9bda338-6c7d-4850-8f46-7cd916372ac9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.488364] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c424cad-1e00-4c0f-8179-4be0cb652309 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.497387] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 639.497387] env[68279]: value = "task-2962508" [ 639.497387] env[68279]: _type = "Task" [ 639.497387] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.506403] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.909131] env[68279]: DEBUG nova.scheduler.client.report [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.939283] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962506, 'name': Rename_Task, 'duration_secs': 0.244475} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.940414] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.940692] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cfe61e9-b821-404c-b6a9-46d04e5b336e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.949200] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 639.949200] env[68279]: value = "task-2962509" [ 639.949200] env[68279]: _type = "Task" [ 639.949200] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.963923] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Updated VIF entry in instance network info cache for port 5340623b-93eb-400e-be25-8fc583cdfe46. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 639.964457] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Updating instance_info_cache with network_info: [{"id": "5340623b-93eb-400e-be25-8fc583cdfe46", "address": "fa:16:3e:3f:79:51", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5340623b-93", "ovs_interfaceid": "5340623b-93eb-400e-be25-8fc583cdfe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.979247] env[68279]: DEBUG oslo_vmware.api [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2962494, 'name': PowerOnVM_Task, 'duration_secs': 2.865287} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.979247] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962507, 'name': ReconfigVM_Task, 'duration_secs': 0.309696} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.981258] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 639.981758] env[68279]: INFO nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Took 15.99 seconds to spawn the instance on the hypervisor. [ 639.981758] env[68279]: DEBUG nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 639.982521] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7/65c3761e-c236-41a9-9adb-d1a6e7a9a7c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.983967] env[68279]: DEBUG nova.compute.manager [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 639.991263] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8bde42-9e55-46a4-8934-e7520562e12d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.996035] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19e776fc-59ed-4d64-8fcb-bd28ff556d0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.004029] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962509, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.019085] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 640.019085] env[68279]: value = "task-2962510" [ 640.019085] env[68279]: _type = "Task" [ 640.019085] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.028376] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962508, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.034706] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962510, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.121641] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully updated port: 0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 640.207229] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.207229] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.418657] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.419546] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 640.422816] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.211s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.425034] env[68279]: INFO nova.compute.claims [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.468567] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962509, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.475020] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Releasing lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.475370] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Received event network-vif-plugged-bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 640.475588] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquiring lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.475839] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.476023] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.476186] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] No waiting events found dispatching network-vif-plugged-bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.476356] env[68279]: WARNING nova.compute.manager 
[req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Received unexpected event network-vif-plugged-bd0e2597-abb7-4689-856c-4ad289b6c70d for instance with vm_state building and task_state spawning. [ 640.476515] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Received event network-changed-bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 640.476666] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Refreshing instance network info cache due to event network-changed-bd0e2597-abb7-4689-856c-4ad289b6c70d. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 640.476847] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquiring lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.476976] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquired lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.477141] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Refreshing network info cache for port bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.524131] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550079} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.528930] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.534233] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c9bda338-6c7d-4850-8f46-7cd916372ac9/c9bda338-6c7d-4850-8f46-7cd916372ac9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.535280] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.535382] env[68279]: INFO nova.compute.manager [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Took 22.08 seconds to build instance. [ 640.536718] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c29c043-cf20-423b-a610-3ddbdf0486ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.546598] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962510, 'name': Rename_Task, 'duration_secs': 0.146609} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.546598] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 640.547033] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 640.547033] env[68279]: value = "task-2962511" [ 640.547033] env[68279]: _type = "Task" [ 640.547033] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.547248] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c7d8636-532d-4c52-a21b-149b1d08904b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.562592] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962511, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.565723] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 640.565723] env[68279]: value = "task-2962512" [ 640.565723] env[68279]: _type = "Task" [ 640.565723] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.575733] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.654121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "bf4e6484-d17d-4244-9163-1ef0012874b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.654757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.924361] env[68279]: DEBUG nova.compute.utils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.927058] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.927058] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.970932] env[68279]: DEBUG oslo_vmware.api [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962509, 'name': PowerOnVM_Task, 'duration_secs': 0.892852} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.973026] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.973026] env[68279]: INFO nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Took 14.57 seconds to spawn the instance on the hypervisor. [ 640.973026] env[68279]: DEBUG nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.973224] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98e4855-93c8-4053-a4de-6d076ae6c82d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.008376] env[68279]: DEBUG nova.policy [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b955266bea154f668cbe9a52faedb22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '494fc6347abb4d6cba544a933229e2ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.039954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7bda94-001e-476c-90fb-52290646b149 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.596s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.059549] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 
tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08402} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.060011] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.060769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c920d54-ecc9-4e97-9591-9d1598c50ab6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.085974] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] c9bda338-6c7d-4850-8f46-7cd916372ac9/c9bda338-6c7d-4850-8f46-7cd916372ac9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.092479] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fad7d64-d07d-4f6e-b4ed-3ee8df7cbaf4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.116306] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962512, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.117754] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 641.117754] env[68279]: value = "task-2962513" [ 641.117754] env[68279]: _type = "Task" [ 641.117754] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.126082] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962513, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.293028] env[68279]: DEBUG nova.compute.manager [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 641.293028] env[68279]: DEBUG nova.compute.manager [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing instance network info cache due to event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 641.293028] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] Acquiring lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.293028] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] Acquired lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.293028] env[68279]: DEBUG nova.network.neutron [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.432736] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 641.497194] env[68279]: INFO nova.compute.manager [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Took 22.09 seconds to build instance. [ 641.543400] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.590313] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962512, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.637572] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962513, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.763276] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updated VIF entry in instance network info cache for port bd0e2597-abb7-4689-856c-4ad289b6c70d. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.765319] env[68279]: DEBUG nova.network.neutron [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updating instance_info_cache with network_info: [{"id": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "address": "fa:16:3e:94:8e:b5", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd0e2597-ab", "ovs_interfaceid": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.817030] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Successfully created port: 2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.910580] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ea121f-c6e2-4216-957d-3be10d4ae30f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.919601] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d719edc-9a20-4650-8d20-b1f241e96312 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.963079] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9291deca-9fdf-4a87-9a41-13640fe6d892 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.979383] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664f5637-9f93-4646-be1d-818585743670 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.995576] env[68279]: DEBUG 
nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 641.998990] env[68279]: DEBUG oslo_concurrency.lockutils [None req-34875f4c-a165-4106-b6bd-904a0c14aff6 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.599s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.071169] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.090678] env[68279]: DEBUG oslo_vmware.api [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2962512, 'name': PowerOnVM_Task, 'duration_secs': 1.2711} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.091444] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 642.091744] env[68279]: INFO nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Took 13.29 seconds to spawn the instance on the hypervisor. 
[ 642.091983] env[68279]: DEBUG nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.092940] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cec48d2-f104-4e33-a975-60882b068781 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.133158] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962513, 'name': ReconfigVM_Task, 'duration_secs': 0.550054} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.133158] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Reconfigured VM instance instance-0000000d to attach disk [datastore1] c9bda338-6c7d-4850-8f46-7cd916372ac9/c9bda338-6c7d-4850-8f46-7cd916372ac9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.133158] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-853e2914-00d4-44b4-ba36-850ba2f4063e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.139926] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 642.139926] env[68279]: value = "task-2962515" [ 642.139926] env[68279]: _type = "Task" [ 642.139926] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.152726] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962515, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.269820] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Releasing lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.270692] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Received event network-vif-plugged-1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 642.270788] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Acquiring lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.271060] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.271289] env[68279]: DEBUG oslo_concurrency.lockutils [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.271551] env[68279]: DEBUG nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] No waiting events found dispatching network-vif-plugged-1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 642.271768] env[68279]: WARNING nova.compute.manager [req-fb1dca12-8ee9-41c1-87d0-ba60d765411a req-50a1aa9f-e9e9-4682-af78-c781a1ccb4d1 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Received unexpected event network-vif-plugged-1e64662e-baf7-4c5a-9f9c-387637e18c28 for instance with vm_state building and task_state spawning. [ 642.396071] env[68279]: DEBUG nova.network.neutron [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updated VIF entry in instance network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 642.396461] env[68279]: DEBUG nova.network.neutron [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [{"id": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "address": "fa:16:3e:05:7a:8e", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape402e4c3-f9", "ovs_interfaceid": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.449427] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.449671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.475035] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 642.502871] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.521659] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.521942] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.522117] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.522353] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.522474] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.522637] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.522842] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.523011] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.523195] env[68279]: DEBUG 
nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.523357] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.523722] env[68279]: DEBUG nova.virt.hardware [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.524532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e714f564-b2ea-456f-8edd-5cffab754913 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.535579] env[68279]: ERROR nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [req-3b19c99d-555c-4252-968a-452186db99cb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3b19c99d-555c-4252-968a-452186db99cb"}]} [ 642.537529] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d9f685-c05a-40e9-bbff-ecec6790a889 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.565595] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 642.587040] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 642.587624] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 642.602156] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 642.618191] env[68279]: INFO nova.compute.manager [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Took 23.17 seconds to build instance. 
[ 642.620627] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 642.654293] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962515, 'name': Rename_Task, 'duration_secs': 0.314792} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.654293] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.654293] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e9eda6c-6186-4435-92e3-dd0260537f18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.660593] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 642.660593] env[68279]: value = "task-2962516" [ 642.660593] env[68279]: _type = "Task" [ 642.660593] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.674710] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962516, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.900562] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dcebba0-38fd-4c36-8c96-17c213e127e5 req-85e811ed-1ece-429e-847d-6e4763fbd0e1 service nova] Releasing lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.005862] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc1b056-ec84-420a-ba9a-5311e3d0d2ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.016426] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6993b763-4d2d-434b-accd-d3df9c973cdb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.053347] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.053557] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be326bf-3b22-4dcf-99e7-0f4d607ac089 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.061946] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b748ad1b-26a5-4410-978a-873b3aafb5eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.075813] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.125246] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8da5698d-6952-479a-8746-d84e50d433ae tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.686s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.158615] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully updated port: 98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 643.185195] 
env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.352820] env[68279]: DEBUG nova.compute.manager [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Received event network-changed-1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.353035] env[68279]: DEBUG nova.compute.manager [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Refreshing instance network info cache due to event network-changed-1e64662e-baf7-4c5a-9f9c-387637e18c28. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 643.354278] env[68279]: DEBUG oslo_concurrency.lockutils [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] Acquiring lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.357464] env[68279]: DEBUG oslo_concurrency.lockutils [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] Acquired lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.361169] env[68279]: DEBUG nova.network.neutron [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Refreshing network info cache for port 1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.588651] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Successfully updated port: 2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 643.619521] env[68279]: ERROR nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [req-106adec9-12d1-461a-b2c6-772790aec8df] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-106adec9-12d1-461a-b2c6-772790aec8df"}]} [ 643.626479] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.662991] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 643.679976] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.709765] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 643.709999] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.737283] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 643.761998] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing trait 
associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 643.792200] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.792200] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.093136] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.093394] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquired lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.093430] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.164092] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.184517] env[68279]: DEBUG oslo_vmware.api [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962516, 'name': PowerOnVM_Task, 'duration_secs': 1.303065} completed successfully. 
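The PowerOnVM_Task records above follow oslo.vmware's start-then-poll pattern: the task is kicked off through the API session and wait_for_task() polls it (the _poll_task lines) until vCenter reports completion. A minimal sketch of that pattern, assuming an already-established VMwareAPISession and a previously looked-up VM managed-object reference; this is illustrative, not Nova's vm_util code:

from oslo_vmware import api


def power_on(session: api.VMwareAPISession, vm_ref):
    """Start PowerOnVM_Task on vm_ref and block until it finishes.

    wait_for_task() is what produces the "_poll_task ... progress is NN%"
    lines seen above; it raises if vCenter reports the task as failed.
    """
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)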
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.184517] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 644.184517] env[68279]: INFO nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Took 12.94 seconds to spawn the instance on the hypervisor. [ 644.184517] env[68279]: DEBUG nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.185059] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb69048-0e9b-40cb-bc1b-b473b801d32a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.227819] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fc465d-c0c8-4af0-8011-691dadc10f9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.239389] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c78394d-9aca-462e-bfc9-c4428c565d81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.279491] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf3edd2-9df8-4faa-a9ff-e1e8d7050826 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.287979] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299456fc-8b9e-48f9-921b-b2db34dbd596 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.303096] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 644.542900] env[68279]: DEBUG nova.network.neutron [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Updated VIF entry in instance network info cache for port 1e64662e-baf7-4c5a-9f9c-387637e18c28. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 644.543334] env[68279]: DEBUG nova.network.neutron [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Updating instance_info_cache with network_info: [{"id": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "address": "fa:16:3e:6e:09:fa", "network": {"id": "14808924-402c-4591-90f3-2f5d974570d8", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-917992346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce23f4f8da064cdeb40d4c6733be5338", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e64662e-ba", "ovs_interfaceid": "1e64662e-baf7-4c5a-9f9c-387637e18c28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.717225] env[68279]: INFO nova.compute.manager [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Took 24.14 seconds to build instance. [ 644.718823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "61392426-52b8-437e-ab3d-122d9335cd36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.719071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.719985] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Instance cache missing network info. 
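The instance_info_cache payload above is a list of VIF dicts with the addressing nested under network → subnets → ips. A small sketch of walking that structure to pull out the fixed IPs; the field names are taken from the cache entry above, while the helper itself is illustrative:

def fixed_ips(network_info):
    """Return the fixed IP addresses from a network_info cache entry."""
    addresses = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            addresses.extend(ip['address']
                             for ip in subnet['ips']
                             if ip['type'] == 'fixed')
    return addresses

# For the cache entry above this yields ['192.168.128.12'].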
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.832264] env[68279]: ERROR nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [req-57aae7ba-e474-4331-a286-9727f8207a01] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-57aae7ba-e474-4331-a286-9727f8207a01"}]} [ 644.856674] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 644.873889] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 644.874381] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 644.887613] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 644.909539] env[68279]: DEBUG nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 
tempest-VolumesAdminNegativeTest-1271088836-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 645.047635] env[68279]: DEBUG oslo_concurrency.lockutils [req-d46061cf-1f10-45c5-bb2f-3eefcbedca4f req-392729f4-497d-4abb-961d-f4dbcd0634b7 service nova] Releasing lock "refresh_cache-c9bda338-6c7d-4850-8f46-7cd916372ac9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.159269] env[68279]: DEBUG nova.network.neutron [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Updating instance_info_cache with network_info: [{"id": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "address": "fa:16:3e:1d:b0:f3", "network": {"id": "3b6d3d94-a4e4-47ec-af59-8a3d2f2b954b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1272323467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "494fc6347abb4d6cba544a933229e2ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c75e839-da", "ovs_interfaceid": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.228753] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0458b793-6015-42b2-ab52-302f01a1afa0 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.665s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.326723] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa714c3-a41c-4a32-ae12-8bc7129865d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.335042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962ecc42-17fb-4952-8738-a4980d267918 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.374283] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bebb03-a958-4f4e-8e32-ae2e8f433ae8 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.383603] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913378be-df54-4801-a78a-439a17c27f03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.404039] env[68279]: DEBUG nova.compute.provider_tree [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 645.448929] env[68279]: DEBUG nova.compute.manager [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Received event network-changed {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.449144] env[68279]: DEBUG nova.compute.manager [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Refreshing instance network info cache due to event network-changed. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 645.449398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] Acquiring lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.449553] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] Acquired lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.449808] env[68279]: DEBUG nova.network.neutron [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.483299] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-plugged-0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.483430] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.483629] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.485285] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.485285] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] No waiting events found dispatching network-vif-plugged-0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 645.485285] env[68279]: WARNING nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received unexpected event network-vif-plugged-0622e137-f260-4168-8beb-bd4491038e88 for instance with vm_state building and task_state spawning. [ 645.485453] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-changed-0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.485782] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing instance network info cache due to event network-changed-0622e137-f260-4168-8beb-bd4491038e88. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 645.485782] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquiring lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.485887] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquired lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.486010] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing network info cache for port 0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.662577] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Releasing lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.663160] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Instance network_info: |[{"id": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "address": "fa:16:3e:1d:b0:f3", "network": {"id": "3b6d3d94-a4e4-47ec-af59-8a3d2f2b954b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1272323467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "494fc6347abb4d6cba544a933229e2ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c75e839-da", "ovs_interfaceid": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 645.663981] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:b0:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51bac3c3-00ab-4a07-9e28-b3c951dee565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2c75e839-da7d-4baa-85d0-ea0ad60abf2c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.676550] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Creating folder: Project (494fc6347abb4d6cba544a933229e2ed). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.676906] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ff26392-2f04-4231-892d-be36d0686ad3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.691388] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Created folder: Project (494fc6347abb4d6cba544a933229e2ed) in parent group-v594445. [ 645.691578] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Creating folder: Instances. Parent ref: group-v594486. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.691812] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cbb0baa-4590-4644-9710-0419f7bd48e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.709353] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Created folder: Instances in parent group-v594486. [ 645.709871] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 645.709871] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 645.710048] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-effe3d03-b187-4d58-8460-4fd5ea59185d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.731183] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.731183] env[68279]: value = "task-2962521" [ 645.731183] env[68279]: _type = "Task" [ 645.731183] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.739047] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Starting instance... 
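The two Folder.CreateFolder invocations above (the project folder, then its Instances child) are plain vSphere API calls routed through the session. A sketch of that call, assuming a session and a parent folder reference are already in hand; this is illustrative, not Nova's vm_util.create_folder:

from oslo_vmware import api


def create_child_folder(session: api.VMwareAPISession, parent_ref, name):
    """Invoke Folder.CreateFolder on parent_ref and return the new folder ref."""
    return session.invoke_api(session.vim, 'CreateFolder', parent_ref, name=name)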
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.747227] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962521, 'name': CreateVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.834722] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Successfully updated port: 83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 645.948269] env[68279]: ERROR nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [req-f32f0575-9419-43fa-b87e-822d3ee4be74] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f32f0575-9419-43fa-b87e-822d3ee4be74"}]} [ 645.948832] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.526s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.956306] env[68279]: ERROR nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Failed to build and run instance: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 (generation 33): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f32f0575-9419-43fa-b87e-822d3ee4be74"}]} [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Traceback (most recent call last): [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] with self.rt.instance_claim(context, instance, node, allocs, [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 645.956306] env[68279]: ERROR 
nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] return f(*args, **kwargs) [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 215, in instance_claim [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] self._update(elevated, cn) [ 645.956306] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] self._update_to_placement(context, compute_node, startup) [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] raise attempt.get() [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] six.reraise(self.value[0], self.value[1], self.value[2]) [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] raise value [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 645.956666] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] self.reportclient.update_from_provider_tree( [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] self.set_inventory_for_provider( [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] raise exception.ResourceProviderUpdateConflict( [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] 
nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 (generation 33): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f32f0575-9419-43fa-b87e-822d3ee4be74"}]} [ 645.957084] env[68279]: ERROR nova.compute.manager [instance: 41d87520-2f40-4313-a14f-84688e979ac2] [ 645.957084] env[68279]: DEBUG nova.compute.utils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] A conflict was encountered attempting to update resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 (generation 33): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource pro {{(pid=68279) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 645.959737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.177s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.962506] env[68279]: INFO nova.compute.claims [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.972817] env[68279]: DEBUG nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Build of instance 41d87520-2f40-4313-a14f-84688e979ac2 was re-scheduled: A conflict was encountered attempting to update resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 (generation 33): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f32f0575-9419-43fa-b87e-822d3ee4be74"}]} {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 645.973360] env[68279]: DEBUG nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Unplugging VIFs for instance {{(pid=68279) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 645.973795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "refresh_cache-41d87520-2f40-4313-a14f-84688e979ac2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.973795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd 
tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquired lock "refresh_cache-41d87520-2f40-4313-a14f-84688e979ac2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.973933] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.065947] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.150832] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.150832] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.250496] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962521, 'name': CreateVM_Task} progress is 99%. 
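The repeated 409 "placement.concurrent_update" failures above are Placement's optimistic concurrency check: every inventory write must carry the resource provider generation the writer last read, and a concurrent update (here, several tempest builds claiming against provider 40ba16cf-... at once) invalidates it, so the report client re-reads and retries until it gives up and the build is re-scheduled. A bare-HTTP sketch of that read-generation-then-PUT loop; the endpoint, token, and retry count are placeholders, and Nova does this through nova.scheduler.client.report rather than requests:

import requests

PLACEMENT = 'http://placement.example.test'            # placeholder endpoint
RP = '40ba16cf-8244-4715-b8c1-975029462ee4'
HEADERS = {'X-Auth-Token': '<token>',                   # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}


def set_inventory(inventories, retries=3):
    url = f'{PLACEMENT}/resource_providers/{RP}/inventories'
    for _ in range(retries):
        # Read the current generation; any concurrent writer bumps it.
        current = requests.get(url, headers=HEADERS).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 / placement.concurrent_update: stale generation, re-read and retry.
    raise RuntimeError('resource provider generation conflict persisted')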
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.273928] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.282432] env[68279]: DEBUG nova.compute.manager [None req-42acd9d6-5864-43aa-bd6d-3b32ea61d182 tempest-ServerDiagnosticsV248Test-1426662560 tempest-ServerDiagnosticsV248Test-1426662560-project-admin] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.283331] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d249196-f495-4070-b188-2a85d8f22add {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.290313] env[68279]: INFO nova.compute.manager [None req-42acd9d6-5864-43aa-bd6d-3b32ea61d182 tempest-ServerDiagnosticsV248Test-1426662560 tempest-ServerDiagnosticsV248Test-1426662560-project-admin] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Retrieving diagnostics [ 646.291427] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f048d41-61a4-45f6-a8de-8ecae72c6660 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.338740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.469086] env[68279]: DEBUG nova.network.neutron [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Updating instance_info_cache with network_info: [{"id": "5340623b-93eb-400e-be25-8fc583cdfe46", "address": "fa:16:3e:3f:79:51", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5340623b-93", "ovs_interfaceid": "5340623b-93eb-400e-be25-8fc583cdfe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 646.500983] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.556392] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.726356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.727530] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.729587] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.729587] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.729587] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.731627] env[68279]: INFO nova.compute.manager [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Terminating instance [ 646.746387] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962521, 'name': CreateVM_Task, 'duration_secs': 0.600336} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.749032] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 646.749032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.749032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.749032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 646.749587] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5223089-b849-4cc3-9fbc-bedbc19090f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.755337] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 646.755337] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528149d7-9313-7238-8d6a-495571950745" [ 646.755337] env[68279]: _type = "Task" [ 646.755337] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.769797] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528149d7-9313-7238-8d6a-495571950745, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.832255] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.976126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1274f35d-3702-4324-a0ea-03438b6aa51c tempest-ServerExternalEventsTest-1668494258 tempest-ServerExternalEventsTest-1668494258-project] Releasing lock "refresh_cache-4c99c929-9fda-42f0-9327-0508ad3e6150" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.005361] env[68279]: DEBUG nova.scheduler.client.report [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 647.031953] env[68279]: DEBUG nova.scheduler.client.report [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 647.032209] env[68279]: DEBUG nova.compute.provider_tree [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 647.051529] env[68279]: DEBUG nova.scheduler.client.report [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 647.061220] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Releasing lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.061497] env[68279]: DEBUG 
nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 647.061672] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing instance network info cache due to event network-changed-e402e4c3-f9cc-469b-a10e-86b1f89eddad. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 647.061862] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquiring lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.061995] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquired lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.062173] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Refreshing network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.063404] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.063549] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.078266] env[68279]: DEBUG nova.scheduler.client.report [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 647.138402] env[68279]: DEBUG nova.compute.manager [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Received event network-vif-plugged-2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 647.138402] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Acquiring lock 
"010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.138402] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.138402] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.138402] env[68279]: DEBUG nova.compute.manager [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] No waiting events found dispatching network-vif-plugged-2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 647.138655] env[68279]: WARNING nova.compute.manager [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Received unexpected event network-vif-plugged-2c75e839-da7d-4baa-85d0-ea0ad60abf2c for instance with vm_state building and task_state spawning. [ 647.138655] env[68279]: DEBUG nova.compute.manager [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Received event network-changed-2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 647.139015] env[68279]: DEBUG nova.compute.manager [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Refreshing instance network info cache due to event network-changed-2c75e839-da7d-4baa-85d0-ea0ad60abf2c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 647.139087] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Acquiring lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.141025] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Acquired lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.141025] env[68279]: DEBUG nova.network.neutron [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Refreshing network info cache for port 2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.243016] env[68279]: DEBUG nova.compute.manager [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 647.243016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.243016] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183a3ef7-67c5-4702-a941-ca0ea051cb08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.254516] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 647.258748] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05a42ec2-0cec-4384-9a86-2bb9930ab7ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.272418] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528149d7-9313-7238-8d6a-495571950745, 'name': SearchDatastore_Task, 'duration_secs': 0.013522} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.272704] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 647.272704] env[68279]: value = "task-2962522" [ 647.272704] env[68279]: _type = "Task" [ 647.272704] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.272963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.273203] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.273430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.273567] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.273736] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.274407] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b94502b7-ab7c-485c-ac86-f3c1e41882e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.286926] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962522, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.288222] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.289221] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 647.289221] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b393b26-bde4-4bfe-a4d1-07de73a5b815 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.294347] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 647.294347] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524682b0-69a1-1898-1846-fd89d4d2a4a6" [ 647.294347] env[68279]: _type = "Task" [ 647.294347] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.302986] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524682b0-69a1-1898-1846-fd89d4d2a4a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.345083] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Releasing lock "refresh_cache-41d87520-2f40-4313-a14f-84688e979ac2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.345364] env[68279]: DEBUG nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68279) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 647.345578] env[68279]: DEBUG nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.345738] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.391583] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.614511] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e04eef1-72f8-46df-9bb0-039fb3abfb13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.622822] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea24332c-2e43-40b2-a33c-16746790486b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.660108] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.664226] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133b0714-4c19-46f4-92c5-4841d462ea35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.673474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc923936-5002-4610-b614-fb7e61de8891 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.689871] env[68279]: DEBUG nova.compute.provider_tree [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 647.794414] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962522, 'name': PowerOffVM_Task, 'duration_secs': 0.238453} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.794802] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.794979] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.798468] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-514ed1c2-6677-43f2-a7d0-d87d9170d0a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.806841] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524682b0-69a1-1898-1846-fd89d4d2a4a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011212} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.807939] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1db75ae4-1eb6-4899-a38a-e137a3ebfb9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.814021] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 647.814021] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7240d-89ac-996b-d439-1d3281eceeee" [ 647.814021] env[68279]: _type = "Task" [ 647.814021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.826975] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7240d-89ac-996b-d439-1d3281eceeee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.858244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "4c99c929-9fda-42f0-9327-0508ad3e6150" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.858552] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.858789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.858983] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.859189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.860977] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 647.861210] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 647.861408] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Deleting the datastore file [datastore1] c9bda338-6c7d-4850-8f46-7cd916372ac9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 647.861944] env[68279]: INFO nova.compute.manager [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Terminating instance [ 647.863506] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aa2fdb5-a73d-4bd3-bfa6-45979d9a5213 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.871132] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for the task: (returnval){ [ 647.871132] env[68279]: value = "task-2962525" [ 647.871132] env[68279]: _type = "Task" [ 647.871132] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.891467] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.898687] env[68279]: DEBUG nova.network.neutron [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.902810] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "b869231a-5293-433f-ac7c-d50030368826" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.903074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.903283] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "b869231a-5293-433f-ac7c-d50030368826-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.903461] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.903647] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.905952] env[68279]: INFO nova.compute.manager [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Terminating instance [ 647.931184] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updated VIF entry in instance network info cache for port e402e4c3-f9cc-469b-a10e-86b1f89eddad. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 647.931249] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [{"id": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "address": "fa:16:3e:05:7a:8e", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape402e4c3-f9", "ovs_interfaceid": "e402e4c3-f9cc-469b-a10e-86b1f89eddad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.242410] env[68279]: DEBUG nova.scheduler.client.report [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 34 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 648.242410] env[68279]: DEBUG nova.compute.provider_tree [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 34 to 35 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 648.242532] env[68279]: DEBUG nova.compute.provider_tree [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 648.249845] env[68279]: DEBUG nova.network.neutron [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Updated VIF entry in instance network info cache for port 2c75e839-da7d-4baa-85d0-ea0ad60abf2c. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 648.250193] env[68279]: DEBUG nova.network.neutron [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Updating instance_info_cache with network_info: [{"id": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "address": "fa:16:3e:1d:b0:f3", "network": {"id": "3b6d3d94-a4e4-47ec-af59-8a3d2f2b954b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1272323467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "494fc6347abb4d6cba544a933229e2ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51bac3c3-00ab-4a07-9e28-b3c951dee565", "external-id": "nsx-vlan-transportzone-645", "segmentation_id": 645, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c75e839-da", "ovs_interfaceid": "2c75e839-da7d-4baa-85d0-ea0ad60abf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.326746] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7240d-89ac-996b-d439-1d3281eceeee, 'name': SearchDatastore_Task, 'duration_secs': 0.012598} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.327105] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.327215] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 010e5bfc-814c-4bde-8a16-7c2009ee13b6/010e5bfc-814c-4bde-8a16-7c2009ee13b6.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 648.327453] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2db02b72-c853-4ace-8f0a-eedb719ee136 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.336288] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 648.336288] env[68279]: value = "task-2962526" [ 648.336288] env[68279]: _type = "Task" [ 648.336288] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.343125] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.369272] env[68279]: DEBUG nova.compute.manager [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 648.369272] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 648.372069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e665feec-a4d2-4190-ae72-9e9a9188bd13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.388099] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 648.391210] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e563186-caa1-4f48-bdfb-e306ecd6c105 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.394308] env[68279]: DEBUG oslo_vmware.api [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Task: {'id': task-2962525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247757} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.394555] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 648.394732] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 648.394904] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.395089] env[68279]: INFO nova.compute.manager [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 648.395333] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 648.395886] env[68279]: DEBUG nova.compute.manager [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 648.395986] env[68279]: DEBUG nova.network.neutron [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.400276] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 648.400276] env[68279]: value = "task-2962527" [ 648.400276] env[68279]: _type = "Task" [ 648.400276] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.400804] env[68279]: INFO nova.compute.manager [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 41d87520-2f40-4313-a14f-84688e979ac2] Took 1.05 seconds to deallocate network for instance. [ 648.412354] env[68279]: DEBUG nova.compute.manager [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 648.412467] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 648.416909] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd279a38-9c34-4717-be47-08d01ccb4d82 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.419654] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962527, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.425647] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 648.425956] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32d6073b-2ec2-49ea-a421-d0283597f3e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.435462] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Releasing lock "refresh_cache-6b778e98-12c2-42a5-a772-06ea32d090b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.438043] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-plugged-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.438043] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.438043] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.438043] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.438043] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] No waiting events found dispatching network-vif-plugged-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 648.438551] env[68279]: WARNING nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received unexpected event network-vif-plugged-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 for instance with vm_state building and task_state spawning. 
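The repeated "Acquiring lock" / "acquired" / "released" triplets in the entries above and below come from oslo.concurrency's lockutils helpers guarding per-instance state, here the "<uuid>-events" map used when external network events are popped or cleared. A minimal sketch of that locking pattern follows, assuming only that oslo.concurrency is installed; the function and variable names (pop_event, events, instance_uuid) are hypothetical illustrations, not Nova's actual implementation.

# Illustrative sketch of the per-instance "-events" lock pattern seen in the log;
# names are hypothetical, not Nova source code.
from oslo_concurrency import lockutils

def pop_event(events, instance_uuid, event_name):
    # lockutils.lock() is a context manager; entering and leaving it produces
    # the "acquired ... waited Ns" and '"released" ... held Ns' DEBUG lines.
    with lockutils.lock(instance_uuid + '-events'):
        # Remove and return the named event for this instance, if any is waiting;
        # returning None corresponds to the "No waiting events found" path above.
        return events.get(instance_uuid, {}).pop(event_name, None)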
[ 648.438551] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-changed-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.438551] env[68279]: DEBUG nova.compute.manager [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing instance network info cache due to event network-changed-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.438551] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquiring lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.438551] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 648.438551] env[68279]: value = "task-2962528" [ 648.438551] env[68279]: _type = "Task" [ 648.438551] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.451407] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.753565] env[68279]: DEBUG oslo_concurrency.lockutils [req-6505b8a1-52fd-4be0-b97f-1eb015b02c77 req-239b1fe2-0ba3-4b7a-a949-303dc3b03b65 service nova] Releasing lock "refresh_cache-010e5bfc-814c-4bde-8a16-7c2009ee13b6" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.754467] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.754987] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.766028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.961s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.766028] env[68279]: INFO nova.compute.claims [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.777851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "239d0522-5101-49e0-8d3b-85b54927cd21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.777851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.777851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "239d0522-5101-49e0-8d3b-85b54927cd21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.778077] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.779035] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.783656] env[68279]: INFO nova.compute.manager [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Terminating instance [ 648.850400] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 
tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962526, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.920950] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962527, 'name': PowerOffVM_Task, 'duration_secs': 0.267263} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.921539] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 648.921765] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 648.922090] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4490d64e-cbb2-470e-816e-c7646bb082e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.957167] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962528, 'name': PowerOffVM_Task, 'duration_secs': 0.233813} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.957167] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 648.957279] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 648.957517] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-829c696a-f832-4f0b-9343-b0405578d749 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.963686] env[68279]: DEBUG nova.network.neutron [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [{"id": "0622e137-f260-4168-8beb-bd4491038e88", "address": "fa:16:3e:6e:79:86", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0622e137-f2", "ovs_interfaceid": "0622e137-f260-4168-8beb-bd4491038e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "address": "fa:16:3e:07:16:ac", "network": {"id": "f142e087-b46c-4e41-a7b8-de950b518d41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755266048", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fb9edf-55", "ovs_interfaceid": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "address": "fa:16:3e:7b:7d:a5", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a927c2-f8", "ovs_interfaceid": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.996439] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 648.996598] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 648.996809] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Deleting the datastore file [datastore1] 4c99c929-9fda-42f0-9327-0508ad3e6150 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 648.997097] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b8d4a41-54d3-493b-be2c-79ad681e0b97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.005458] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for the task: (returnval){ [ 649.005458] env[68279]: value = "task-2962531" [ 649.005458] env[68279]: _type = "Task" [ 649.005458] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.018996] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962531, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.026055] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 649.026428] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 649.027740] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Deleting the datastore file [datastore2] b869231a-5293-433f-ac7c-d50030368826 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 649.027740] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ee0a4d7-ac00-4063-9ec8-131ebdd2af10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.035919] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for the task: (returnval){ [ 649.035919] env[68279]: value = "task-2962532" [ 649.035919] env[68279]: _type = "Task" [ 649.035919] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.046418] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.054525] env[68279]: DEBUG nova.compute.manager [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Received event network-changed-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.054525] env[68279]: DEBUG nova.compute.manager [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Refreshing instance network info cache due to event network-changed-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 649.055492] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Acquiring lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.055492] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Acquired lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.055492] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Refreshing network info cache for port 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 649.274549] env[68279]: DEBUG nova.compute.utils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.282705] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 649.282705] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.294189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "refresh_cache-239d0522-5101-49e0-8d3b-85b54927cd21" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.294189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquired lock "refresh_cache-239d0522-5101-49e0-8d3b-85b54927cd21" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.294189] env[68279]: DEBUG nova.network.neutron [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.351024] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962526, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.522297} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.354039] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 010e5bfc-814c-4bde-8a16-7c2009ee13b6/010e5bfc-814c-4bde-8a16-7c2009ee13b6.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 649.354039] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 649.354039] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-492b5099-e61d-47f5-ad9c-d90b9d4819f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.361319] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 649.361319] env[68279]: value = "task-2962533" [ 649.361319] env[68279]: _type = "Task" [ 649.361319] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.377683] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962533, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.443989] env[68279]: INFO nova.scheduler.client.report [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Deleted allocations for instance 41d87520-2f40-4313-a14f-84688e979ac2 [ 649.467614] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.468078] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance network_info: |[{"id": "0622e137-f260-4168-8beb-bd4491038e88", "address": "fa:16:3e:6e:79:86", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0622e137-f2", "ovs_interfaceid": "0622e137-f260-4168-8beb-bd4491038e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "address": "fa:16:3e:07:16:ac", "network": {"id": "f142e087-b46c-4e41-a7b8-de950b518d41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755266048", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fb9edf-55", "ovs_interfaceid": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "address": "fa:16:3e:7b:7d:a5", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": 
[{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a927c2-f8", "ovs_interfaceid": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 649.468633] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Acquired lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.469622] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing network info cache for port 98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 649.470865] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:79:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0622e137-f260-4168-8beb-bd4491038e88', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:16:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc16c915-cff1-4faa-a529-9773ee9bab7e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98fb9edf-552e-4a39-8b5b-4b81ddaf69b6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:7d:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83a927c2-f8a2-4b72-a78e-a206cc03e8d8', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.485980] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Creating folder: Project (4632448f387a49eda08bcdc55b94a84c). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.487463] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-669732a1-d53c-4da1-966b-b4c7680cd5f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.493560] env[68279]: DEBUG nova.policy [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '981b28563b0f45d495a0d2dda2a25d3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab096093920e44a5b89eac4266233a62', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 649.503311] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Created folder: Project (4632448f387a49eda08bcdc55b94a84c) in parent group-v594445. [ 649.503549] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Creating folder: Instances. Parent ref: group-v594490. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.503801] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bf6d3b2-b1f1-4fcf-a9bb-26a7d52a4f8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.518208] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962531, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.522880] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Created folder: Instances in parent group-v594490. [ 649.523156] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 649.523338] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.523544] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f039364-e113-4268-8438-8afc789ac3de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.791542] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.791542] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.791542] env[68279]: value = "task-2962536" [ 649.791542] env[68279]: _type = "Task" [ 649.791542] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.791542] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962536, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.791542] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 649.861444] env[68279]: DEBUG nova.network.neutron [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.878390] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962533, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.246645} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.883198] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 649.884586] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4de0a00-e1a4-4f99-8319-f58603295951 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.902515] env[68279]: DEBUG nova.network.neutron [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.914182] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 010e5bfc-814c-4bde-8a16-7c2009ee13b6/010e5bfc-814c-4bde-8a16-7c2009ee13b6.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 649.917896] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fa39e57-ab82-492d-a718-5f55ed53ad3a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.945841] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 649.945841] env[68279]: value = "task-2962538" [ 649.945841] env[68279]: _type = "Task" [ 649.945841] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.955143] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e65be1b2-7c69-40cc-b688-0c33a614f1bd tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "41d87520-2f40-4313-a14f-84688e979ac2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.231s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.960656] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962538, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.016901] env[68279]: DEBUG oslo_vmware.api [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Task: {'id': task-2962531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.654577} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.017436] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 650.017539] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 650.017751] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.017847] env[68279]: INFO nova.compute.manager [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Took 1.65 seconds to destroy the instance on the hypervisor. [ 650.018338] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.018555] env[68279]: DEBUG nova.compute.manager [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 650.018652] env[68279]: DEBUG nova.network.neutron [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.065861] env[68279]: DEBUG oslo_vmware.api [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Task: {'id': task-2962532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.632986} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.069602] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 650.069602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 650.069761] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.069812] env[68279]: INFO nova.compute.manager [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] [instance: b869231a-5293-433f-ac7c-d50030368826] Took 1.66 seconds to destroy the instance on the hypervisor. [ 650.070046] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.070242] env[68279]: DEBUG nova.compute.manager [-] [instance: b869231a-5293-433f-ac7c-d50030368826] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 650.070333] env[68279]: DEBUG nova.network.neutron [-] [instance: b869231a-5293-433f-ac7c-d50030368826] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.077970] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962536, 'name': CreateVM_Task, 'duration_secs': 0.500568} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.080682] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.082627] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.082794] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.083116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.085421] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2cc1547-fda5-49f8-bba0-9a6bd0581d84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.091372] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 650.091372] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523154e8-cfeb-2d14-c6f4-ee1ff04d15f1" [ 650.091372] env[68279]: _type = "Task" [ 650.091372] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.106958] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523154e8-cfeb-2d14-c6f4-ee1ff04d15f1, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.106958] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.106958] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.107244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.107418] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.107567] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.110201] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e967d353-3a1e-4ede-8670-226e0a972e43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.119838] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.120035] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 650.120887] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0169671-21d2-4f7d-9b89-9b07a21bcc17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.130264] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 650.130264] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52063ab2-10c2-0026-7468-e956d2eaeb60" [ 650.130264] env[68279]: _type = "Task" [ 650.130264] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.134013] env[68279]: DEBUG nova.network.neutron [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.144386] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52063ab2-10c2-0026-7468-e956d2eaeb60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.295894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bdc39b-f855-4aa3-ac4b-6521229f43d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.306121] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26089ae8-be8e-4362-b0d8-77e75aba2934 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.351854] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8aef75a-6b54-4ca2-b47c-bdac620a5288 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.361574] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2d7cba-3ba3-45d0-a956-02b8e8bc5d5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.382300] env[68279]: DEBUG nova.compute.provider_tree [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.416507] env[68279]: INFO nova.compute.manager [-] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Took 2.02 seconds to deallocate network for instance. 
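The surrounding entries repeat oslo.vmware's task-polling pattern: a vCenter method such as PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task or ReconfigVM_Task is invoked and returns a task reference, after which wait_for_task blocks while _poll_task logs "progress is N%" until the task completes and its duration_secs is reported. The short Python sketch below is an illustration of that pattern only, not Nova source: the host and credentials are placeholders, power_off_vm is a hypothetical helper, wait_for_task and _poll_task are the calls named in the trailers above, and VMwareAPISession and invoke_api are assumed here to be the matching public oslo.vmware entry points.

    # Sketch of the task-polling pattern visible in this log (illustrative only).
    from oslo_vmware import api as vmware_api

    def power_off_vm(session, vm_ref):
        # Hypothetical helper mirroring the PowerOffVM_Task entries above.
        # invoke_api starts the vCenter task and returns a task reference
        # immediately; it does not wait for completion.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task state (the "_poll_task ... progress
        # is N%" lines) and raises if the task finishes in an error state.
        return session.wait_for_task(task)

    # Constructing a session logs into vCenter, so the placeholder values
    # below would need a reachable endpoint; shown commented out for that
    # reason. vm_ref would normally come from a PropertyCollector query,
    # as in the RetrievePropertiesEx invocations in this log.
    # session = vmware_api.VMwareAPISession(
    #     'vc.example.test', 'user', 'password',
    #     api_retry_count=10, task_poll_interval=0.5)
    # power_off_vm(session, vm_ref)

The 'duration_secs' values in the completed Task dicts above are what such a wait reports once the final poll observes the task in a success state.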
[ 650.459700] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962538, 'name': ReconfigVM_Task, 'duration_secs': 0.33675} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.463800] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 010e5bfc-814c-4bde-8a16-7c2009ee13b6/010e5bfc-814c-4bde-8a16-7c2009ee13b6.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 650.463800] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.465383] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d55f7990-cecd-4da1-b7cb-259517654bdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.474350] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 650.474350] env[68279]: value = "task-2962539" [ 650.474350] env[68279]: _type = "Task" [ 650.474350] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.484485] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962539, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.636732] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Releasing lock "refresh_cache-239d0522-5101-49e0-8d3b-85b54927cd21" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.637402] env[68279]: DEBUG nova.compute.manager [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.637756] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.642833] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976d6e54-b712-4854-9d48-462635f83dcb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.646770] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52063ab2-10c2-0026-7468-e956d2eaeb60, 'name': SearchDatastore_Task, 'duration_secs': 0.014105} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.648403] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1d2129-e66f-401e-ac07-3a0ba2227639 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.654317] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.655148] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54005d43-099c-4bbf-a543-682867ca5d1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.658388] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 650.658388] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5267ee23-7937-3cea-c810-6e925303a7ec" [ 650.658388] env[68279]: _type = "Task" [ 650.658388] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.669021] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 650.669021] env[68279]: value = "task-2962540" [ 650.669021] env[68279]: _type = "Task" [ 650.669021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.678575] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5267ee23-7937-3cea-c810-6e925303a7ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.689365] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.719151] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updated VIF entry in instance network info cache for port 98fb9edf-552e-4a39-8b5b-4b81ddaf69b6. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.719690] env[68279]: DEBUG nova.network.neutron [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [{"id": "0622e137-f260-4168-8beb-bd4491038e88", "address": "fa:16:3e:6e:79:86", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0622e137-f2", "ovs_interfaceid": "0622e137-f260-4168-8beb-bd4491038e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "address": "fa:16:3e:07:16:ac", "network": {"id": "f142e087-b46c-4e41-a7b8-de950b518d41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755266048", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fb9edf-55", "ovs_interfaceid": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "address": "fa:16:3e:7b:7d:a5", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": 
"br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a927c2-f8", "ovs_interfaceid": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.817479] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 650.844433] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 650.844682] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.844992] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 650.845263] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.845425] env[68279]: DEBUG nova.virt.hardware [None 
req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 650.845585] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 650.845796] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 650.845982] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 650.846168] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 650.846641] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 650.846941] env[68279]: DEBUG nova.virt.hardware [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 650.848140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2aae73-caaf-40d7-a9b0-9f28e4e1ed38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.857671] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f63126-1442-4d1c-b933-fda7b45e35e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.888409] env[68279]: DEBUG nova.scheduler.client.report [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.923823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.962912] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updated VIF entry in instance network info cache for port 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.964377] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating instance_info_cache with network_info: [{"id": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "address": "fa:16:3e:ad:43:12", "network": {"id": "d2cab8c6-aa6c-4741-8ffe-4e7a4f40a698", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2022110668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0703802e5fb4caf939a345174f379be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949c2fdc-85", "ovs_interfaceid": "949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.999533] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962539, 'name': Rename_Task, 'duration_secs': 0.219831} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.000606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.000984] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 651.001541] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75bb4f88-3522-4293-ad4e-1bc26716c218 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.010556] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 651.010556] env[68279]: value = "task-2962541" [ 651.010556] env[68279]: _type = "Task" [ 651.010556] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.023387] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962541, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.171413] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5267ee23-7937-3cea-c810-6e925303a7ec, 'name': SearchDatastore_Task, 'duration_secs': 0.025256} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.175785] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.176253] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b40956fc-66f5-4bb6-8763-22465bb221bf/b40956fc-66f5-4bb6-8763-22465bb221bf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 651.176822] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f5f0c03-08e2-4236-a32d-c5b1b9d9b948 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.188339] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962540, 'name': PowerOffVM_Task, 'duration_secs': 0.137306} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.190587] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.190959] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.191449] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 651.191449] env[68279]: value = "task-2962542" [ 651.191449] env[68279]: _type = "Task" [ 651.191449] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.191888] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ed9a56b-d16c-4bfd-aae3-91dc098e49ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.204844] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.223723] env[68279]: DEBUG oslo_concurrency.lockutils [req-aaf5cda3-0c92-4878-a2ad-2aae06295112 req-f9bd6a2f-d379-4c15-b4ed-0e686cc09350 service nova] Releasing lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.230021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.230021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.230021] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleting the datastore file [datastore2] 239d0522-5101-49e0-8d3b-85b54927cd21 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.230021] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-684f4278-4378-437e-ac4a-3bf440cf100d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.238261] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for the task: (returnval){ [ 651.238261] env[68279]: value = "task-2962544" [ 651.238261] env[68279]: _type = "Task" [ 651.238261] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.243544] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Successfully created port: 4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.251256] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962544, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.297767] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.298494] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.394110] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.395037] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 651.398087] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.736s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.399890] env[68279]: INFO nova.compute.claims [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.466584] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Releasing lock "refresh_cache-6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.467776] env[68279]: DEBUG nova.compute.manager [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Received event network-changed-bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.468282] env[68279]: DEBUG nova.compute.manager [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Refreshing instance network info cache due to event network-changed-bd0e2597-abb7-4689-856c-4ad289b6c70d. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 651.468698] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Acquiring lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.469050] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Acquired lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.469599] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Refreshing network info cache for port bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.525469] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962541, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.545050] env[68279]: DEBUG nova.network.neutron [-] [instance: b869231a-5293-433f-ac7c-d50030368826] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.559729] env[68279]: DEBUG nova.network.neutron [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.581723] env[68279]: DEBUG nova.compute.manager [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-plugged-83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.582175] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.582268] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.582493] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.582732] env[68279]: DEBUG nova.compute.manager [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] No waiting events found dispatching network-vif-plugged-83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 651.582939] env[68279]: WARNING nova.compute.manager [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received unexpected event network-vif-plugged-83a927c2-f8a2-4b72-a78e-a206cc03e8d8 for instance with vm_state building and task_state spawning. [ 651.583256] env[68279]: DEBUG nova.compute.manager [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-changed-83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.583403] env[68279]: DEBUG nova.compute.manager [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing instance network info cache due to event network-changed-83a927c2-f8a2-4b72-a78e-a206cc03e8d8. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 651.583656] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Acquiring lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.583805] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Acquired lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.584191] env[68279]: DEBUG nova.network.neutron [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Refreshing network info cache for port 83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.714319] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962542, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.746385] env[68279]: DEBUG oslo_vmware.api [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Task: {'id': task-2962544, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104779} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.746689] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.746874] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.747086] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.747239] env[68279]: INFO nova.compute.manager [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Took 1.11 seconds to destroy the instance on the hypervisor. 
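The PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task entries above all follow the same poll-until-terminal pattern that every "Waiting for the task" / _poll_task line in this log reflects. The sketch below is a minimal, hypothetical illustration of that pattern only; the helper names (get_task_info, TaskFailed) are made up for the example and are not oslo.vmware's actual API.

# Illustrative sketch of a generic poll-until-terminal loop, assuming a
# get_task_info callable that returns an object with .state in
# {'queued', 'running', 'success', 'error'}. Not oslo.vmware code.
import time


class TaskFailed(Exception):
    """Raised when a polled task ends in an error state."""


def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    """Poll task_ref until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(getattr(info, 'error', 'unknown error'))
        # Still 'queued' or 'running': sleep and poll again, which is the
        # point at which the "progress is N%" DEBUG lines above are emitted.
        time.sleep(interval)
    raise TimeoutError(f'task {task_ref} did not complete in {timeout}s')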
[ 651.747475] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.747660] env[68279]: DEBUG nova.compute.manager [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.747756] env[68279]: DEBUG nova.network.neutron [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.781201] env[68279]: DEBUG nova.network.neutron [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.900858] env[68279]: DEBUG nova.compute.utils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 651.903581] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.903581] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.993945] env[68279]: DEBUG nova.policy [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3b2ea6742ff4fbfb9502705f949e4b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '486f9055435d482aa2ebcdf6fe29b948', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 652.024402] env[68279]: DEBUG oslo_vmware.api [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962541, 'name': PowerOnVM_Task, 'duration_secs': 0.523797} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.024784] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 652.024992] env[68279]: INFO nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 9.55 seconds to spawn the instance on the hypervisor. [ 652.025187] env[68279]: DEBUG nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 652.026227] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb8309c-eca7-40e4-8501-77d82d0da0b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.047412] env[68279]: INFO nova.compute.manager [-] [instance: b869231a-5293-433f-ac7c-d50030368826] Took 1.98 seconds to deallocate network for instance. [ 652.061199] env[68279]: INFO nova.compute.manager [-] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Took 2.04 seconds to deallocate network for instance. [ 652.212049] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575539} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.212343] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b40956fc-66f5-4bb6-8763-22465bb221bf/b40956fc-66f5-4bb6-8763-22465bb221bf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 652.212559] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 652.212877] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d59ad9f0-868c-48e9-8c06-ce1edcb787c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.222257] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 652.222257] env[68279]: value = "task-2962546" [ 652.222257] env[68279]: _type = "Task" [ 652.222257] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.234797] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962546, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.283404] env[68279]: DEBUG nova.network.neutron [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.405943] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 652.551706] env[68279]: INFO nova.compute.manager [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 20.52 seconds to build instance. 
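The lockutils entries around these instance builds record how long each caller waited to acquire a named lock and how long it held it (for example "waited 12.736s" and "held 2.632s" above). The following is a simplified, hypothetical sketch of that acquire/waited/held bookkeeping using a plain context manager; it is a stand-in for the idea, not oslo.concurrency's real implementation.

# Illustrative sketch only: named locks with waited/held timing, echoing the
# format of the lockutils DEBUG lines in this log. Not oslo.concurrency code.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextmanager
def synchronized(name: str, caller: str):
    """Acquire the named lock, reporting wait and hold durations."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - start - waited
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')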
[ 652.557992] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.575136] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.575820] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updated VIF entry in instance network info cache for port bd0e2597-abb7-4689-856c-4ad289b6c70d. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 652.576179] env[68279]: DEBUG nova.network.neutron [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updating instance_info_cache with network_info: [{"id": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "address": "fa:16:3e:94:8e:b5", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd0e2597-ab", "ovs_interfaceid": "bd0e2597-abb7-4689-856c-4ad289b6c70d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.740728] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074105} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.741698] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.741780] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9efe6e6-3134-4136-8836-6af6e6956059 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.749083] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Successfully created port: 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.775695] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] b40956fc-66f5-4bb6-8763-22465bb221bf/b40956fc-66f5-4bb6-8763-22465bb221bf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.776205] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05165e82-c503-4547-ad24-9e007aca6794 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.794943] env[68279]: INFO nova.compute.manager [-] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Took 1.05 seconds to deallocate network for instance. [ 652.809226] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 652.809226] env[68279]: value = "task-2962547" [ 652.809226] env[68279]: _type = "Task" [ 652.809226] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.822202] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962547, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.876246] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99ababa-7ec5-44cb-9935-a909bf10e6dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.884723] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f274665e-cf6e-415a-88cc-02e3e1c8c79b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.926445] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a263871-01fb-4053-98fd-fd7566c9d993 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.935399] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c23b71-3828-40ca-bd62-d13292b993ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.951257] env[68279]: DEBUG nova.compute.provider_tree [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.012192] env[68279]: DEBUG nova.compute.manager [req-68109664-e62f-450d-949d-61e83afcb96c req-5fc0ea2c-e96c-46cc-8197-ca23305ebf46 service nova] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Received event network-vif-deleted-1e64662e-baf7-4c5a-9f9c-387637e18c28 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.059634] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b0d631ea-201e-4474-baf6-ef802ecc51d0 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.745s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.081868] env[68279]: DEBUG oslo_concurrency.lockutils [req-82da83ca-3233-4a6d-80b8-0c4c2c5ed08c req-62e2c21b-b7d8-41bd-b2d6-c39edda3081b service nova] Releasing lock "refresh_cache-65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.161825] env[68279]: DEBUG nova.network.neutron [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updated VIF entry in instance network info cache for port 83a927c2-f8a2-4b72-a78e-a206cc03e8d8. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.162386] env[68279]: DEBUG nova.network.neutron [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [{"id": "0622e137-f260-4168-8beb-bd4491038e88", "address": "fa:16:3e:6e:79:86", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0622e137-f2", "ovs_interfaceid": "0622e137-f260-4168-8beb-bd4491038e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "address": "fa:16:3e:07:16:ac", "network": {"id": "f142e087-b46c-4e41-a7b8-de950b518d41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755266048", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fb9edf-55", "ovs_interfaceid": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "address": "fa:16:3e:7b:7d:a5", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap83a927c2-f8", "ovs_interfaceid": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.302861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.321932] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962547, 'name': ReconfigVM_Task, 'duration_secs': 0.410745} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.322318] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Reconfigured VM instance instance-0000000e to attach disk [datastore1] b40956fc-66f5-4bb6-8763-22465bb221bf/b40956fc-66f5-4bb6-8763-22465bb221bf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 653.322925] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd6004c4-cf0b-481b-92ca-fbdac27fbc29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.331582] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 653.331582] env[68279]: value = "task-2962548" [ 653.331582] env[68279]: _type = "Task" [ 653.331582] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.345042] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962548, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.428565] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 653.454909] env[68279]: DEBUG nova.scheduler.client.report [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.474482] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.477016] env[68279]: 
DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.477016] env[68279]: DEBUG nova.virt.hardware [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.477783] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da63b98-e6bb-4cd5-a7fc-924f6de87264 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.488564] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bc861d-752a-4b1e-8544-b075e97371d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.562992] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.667337] env[68279]: DEBUG oslo_concurrency.lockutils [req-722a5394-1510-4579-bd19-7ace9d48bd14 req-16edc170-8732-4591-aad2-be7f53cbd56e service nova] Releasing lock "refresh_cache-b40956fc-66f5-4bb6-8763-22465bb221bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.851445] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962548, 'name': Rename_Task, 'duration_secs': 0.182812} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.851879] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.852238] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f9f361e-23d3-4dbd-bb1f-12f286ea1df2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.863988] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 653.863988] env[68279]: value = "task-2962550" [ 653.863988] env[68279]: _type = "Task" [ 653.863988] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.877055] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.958600] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "f7db383a-648a-4984-ae25-72bc2ccfe369" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.958877] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.962036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.962540] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.967143] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.438s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.099695] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.379558] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962550, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.473721] env[68279]: DEBUG nova.compute.utils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 654.475593] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 654.475797] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 654.482728] env[68279]: INFO nova.compute.claims [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.810352] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Successfully updated port: 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.873341] env[68279]: DEBUG nova.policy [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd67d0e35641a4494a5087e0f3abdc767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd1384256d224e80bf6f25b9fd054376', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 654.882122] env[68279]: DEBUG oslo_vmware.api [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962550, 'name': PowerOnVM_Task, 'duration_secs': 0.657917} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.882455] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 654.882665] env[68279]: INFO nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Took 21.18 seconds to spawn the instance on the hypervisor. 
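The PowerOnVM_Task entries above show the usual oslo.vmware pattern: a vSphere task method is invoked through the shared API session, then the task is polled (progress 0% → 89% → completed successfully, with a recorded duration_secs) until it finishes. A minimal sketch of that pattern, assuming an already-established `oslo_vmware.api.VMwareAPISession` and a VM managed-object reference obtained elsewhere:

```python
# Sketch only: power on a VM and block until the vSphere task completes.
# `session` is assumed to be an established oslo_vmware.api.VMwareAPISession;
# `vm_ref` is a VirtualMachine managed-object reference looked up elsewhere.
def power_on_vm(session, vm_ref):
    # Invoking the SOAP method returns a Task managed-object reference.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task() polls the task until it succeeds (raising on failure);
    # the "progress is N%" lines in the log come from that polling loop.
    return session.wait_for_task(task_ref)
```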
[ 654.882840] env[68279]: DEBUG nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 654.883649] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4580c76-9e4a-487b-a938-512ad40730d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.978921] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.993550] env[68279]: INFO nova.compute.resource_tracker [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating resource usage from migration 8c57a7eb-23a4-49a8-bb18-9efe945bfe06 [ 655.098974] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Successfully updated port: 4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 655.315535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.315535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquired lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.315535] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.411880] env[68279]: INFO nova.compute.manager [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Took 31.72 seconds to build instance. 
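The refresh_cache-&lt;instance-uuid&gt; lock lines above follow oslo.concurrency's named-lock pattern: an instance's network info cache is only rebuilt while holding a per-instance lock, so concurrent requests serialize instead of clobbering each other. A minimal sketch, with a hypothetical `rebuild_network_info()` standing in for the Neutron queries that `_get_instance_nw_info` performs in the log:

```python
# Sketch only: serialize per-instance cache refreshes with a named lock.
from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid, rebuild_network_info):
    lock_name = "refresh_cache-%s" % instance_uuid
    # lockutils.lock() is a context manager; the "Acquiring"/"Acquired"/
    # "Releasing" lines in the log correspond to entering and leaving it.
    with lockutils.lock(lock_name):
        return rebuild_network_info(instance_uuid)
```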
[ 655.457443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194427e5-7cc2-4370-9064-1d4624919150 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.468102] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8aa4a56-f4ca-4d2e-aca1-a3f1fd2d205a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.508535] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae048683-2ca9-4c83-bcfc-4c50f39f4492 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.517488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0e3722-8d1c-472b-8621-60c1d4f138de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.532896] env[68279]: DEBUG nova.compute.provider_tree [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.599498] env[68279]: DEBUG nova.compute.manager [req-6478cf81-81fb-488e-a9f3-b4f76ffa9723 req-599dfda1-db06-4738-8135-5d8429951426 service nova] [instance: b869231a-5293-433f-ac7c-d50030368826] Received event network-vif-deleted-263143b2-0c62-4c8e-94f0-4801bce02f1b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.599709] env[68279]: DEBUG nova.compute.manager [req-6478cf81-81fb-488e-a9f3-b4f76ffa9723 req-599dfda1-db06-4738-8135-5d8429951426 service nova] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Received event network-vif-deleted-5340623b-93eb-400e-be25-8fc583cdfe46 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.601920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.601920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquired lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.601920] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.914448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-18727451-179d-4902-9039-6487ee9b1612 tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.240s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.961019] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.010290] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Successfully created port: 7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.017955] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 656.037986] env[68279]: DEBUG nova.scheduler.client.report [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.055047] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 656.055281] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.055433] env[68279]: 
DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 656.055606] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.055741] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 656.055879] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 656.056361] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 656.056597] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 656.056846] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 656.057022] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 656.057316] env[68279]: DEBUG nova.virt.hardware [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 656.058320] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7214285c-4a24-48c8-8f9c-68d5f62da5a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.073170] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416c8ae8-f999-411b-adbb-befb04edb562 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.159419] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.346286] env[68279]: DEBUG nova.network.neutron [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Updating instance_info_cache with network_info: [{"id": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "address": "fa:16:3e:a0:27:90", "network": {"id": "b6b288a3-f382-44fb-b835-e8cf4310b1d9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1046474361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab096093920e44a5b89eac4266233a62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fbdad72-dc", "ovs_interfaceid": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.405099] env[68279]: DEBUG nova.network.neutron [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updating instance_info_cache with network_info: [{"id": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "address": "fa:16:3e:e1:8e:07", "network": {"id": "53711808-af51-416e-91d0-d0d066b1befb", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-761926569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "486f9055435d482aa2ebcdf6fe29b948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb2c9fd-3c", "ovs_interfaceid": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.420124] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.461631] env[68279]: DEBUG nova.compute.manager [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.462709] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97b2dbe-24f8-4e4a-aae0-7e7b3bfc20a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.549227] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.582s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.550193] env[68279]: INFO nova.compute.manager [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Migrating [ 656.550193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.550193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.551110] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.480s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.555666] env[68279]: INFO nova.compute.claims [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.842867] env[68279]: DEBUG nova.compute.manager [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Received event network-vif-plugged-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 
656.843098] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] Acquiring lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.844019] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.844019] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.844019] env[68279]: DEBUG nova.compute.manager [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] No waiting events found dispatching network-vif-plugged-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.844299] env[68279]: WARNING nova.compute.manager [req-8b20d8cf-74ec-4393-a052-b68e34081731 req-4d798b85-d96c-4982-9b39-8fedff06e570 service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Received unexpected event network-vif-plugged-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e for instance with vm_state building and task_state spawning. 
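The network-vif-plugged entries above show Neutron delivering an external event to nova-compute, which takes the instance's events lock, looks for a registered waiter for that event, finds none ("No waiting events found"), and logs the "unexpected event" warning. A much-simplified, self-contained illustration of that dispatch idea (not Nova's actual InstanceEvents implementation), keyed by (instance_uuid, event_name):

```python
# Simplified illustration of dispatching externally-delivered events to
# waiters; Nova's real event handling is considerably more involved.
import threading


class EventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Mirrors the WARNING above: nothing was waiting for this event.
            print("unexpected event %s for %s" % (event_name, instance_uuid))
            return False
        ev.set()
        return True


if __name__ == "__main__":
    waiters = EventWaiters()
    # No waiter registered yet, so this event is reported as unexpected.
    waiters.dispatch("instance-uuid", "network-vif-plugged-port-uuid")
```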
[ 656.849803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Releasing lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.850077] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Instance network_info: |[{"id": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "address": "fa:16:3e:a0:27:90", "network": {"id": "b6b288a3-f382-44fb-b835-e8cf4310b1d9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1046474361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab096093920e44a5b89eac4266233a62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fbdad72-dc", "ovs_interfaceid": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.850474] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:27:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fbdad72-dc39-4f81-80af-7eb2c9db8d4e', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.860897] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Creating folder: Project (ab096093920e44a5b89eac4266233a62). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.861609] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-399b7c54-897a-4c66-9560-d9ceffaf9a17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.876202] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Created folder: Project (ab096093920e44a5b89eac4266233a62) in parent group-v594445. 
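The "Instance VIF info" entry above shows how the Neutron network_info is reduced to the handful of fields the VMware driver needs: the MAC address, the port (iface) id, the vmxnet3 model, and an OpaqueNetwork reference carrying the NSX logical-switch id. A rough sketch of that mapping, using only the keys visible in the log; anything beyond those keys is an assumption:

```python
# Sketch only: turn one Neutron VIF dict (shape as seen in the log) into the
# minimal structure the VMware driver logs as "Instance VIF info".
def vif_to_vmware_info(vif):
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],           # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }
```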
[ 656.876411] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Creating folder: Instances. Parent ref: group-v594493. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.876663] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e00564e4-b30a-4041-a33a-05a7149de6fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.894230] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Created folder: Instances in parent group-v594493. [ 656.894508] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.894783] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.895040] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9602cf45-2e34-4a1f-ab75-59c3e6527f95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.911800] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Releasing lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.912094] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Instance network_info: |[{"id": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "address": "fa:16:3e:e1:8e:07", "network": {"id": "53711808-af51-416e-91d0-d0d066b1befb", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-761926569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "486f9055435d482aa2ebcdf6fe29b948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb2c9fd-3c", "ovs_interfaceid": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.912585] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:8e:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7bb2c9fd-3cb4-4813-8661-d5baac85c2fd', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.921118] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Creating folder: Project (486f9055435d482aa2ebcdf6fe29b948). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.922241] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a55f26a6-184c-44ef-bbb1-69dfc992828e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.930816] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.930816] env[68279]: value = "task-2962553" [ 656.930816] env[68279]: _type = "Task" [ 656.930816] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.947106] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Created folder: Project (486f9055435d482aa2ebcdf6fe29b948) in parent group-v594445. [ 656.947106] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Creating folder: Instances. Parent ref: group-v594495. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.947190] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962553, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.948192] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3637674-3a00-442f-a2db-2a4eb44d78c4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.950314] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.960831] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Created folder: Instances in parent group-v594495. 
[ 656.961175] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.961436] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.961709] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-492ca92c-4f94-41ca-b5f7-3ad67a2f1142 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.985466] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.985466] env[68279]: value = "task-2962556" [ 656.985466] env[68279]: _type = "Task" [ 656.985466] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.986602] env[68279]: INFO nova.compute.manager [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] instance snapshotting [ 656.993351] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c96db18-16ef-49da-8526-25b627ea72c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.002937] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962556, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.017780] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8982b932-67e1-4be1-b888-0e07c3af3669 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.032651] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.032921] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.063754] env[68279]: INFO nova.compute.rpcapi [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Automatically selected compute RPC version 6.4 from minimum service version 69 [ 657.064283] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.084812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.085050] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.445774] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962553, 'name': CreateVM_Task, 'duration_secs': 0.349458} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.446220] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.447390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.447671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.448335] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 657.448687] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb1a053-9de3-4d5f-8a68-25ece8819da9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.456020] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 657.456020] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521be379-a483-68ae-5edc-81db912c2c6e" [ 657.456020] env[68279]: _type = "Task" [ 657.456020] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.467739] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521be379-a483-68ae-5edc-81db912c2c6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.501237] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962556, 'name': CreateVM_Task, 'duration_secs': 0.39271} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.501490] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.502905] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.539549] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 657.539549] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bf8f74a4-d3e7-4451-8274-c919fc4ef407 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.555527] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 657.555527] env[68279]: value = "task-2962557" [ 657.555527] env[68279]: _type = "Task" [ 657.555527] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.579010] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962557, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.581965] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.582190] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.584823] env[68279]: DEBUG nova.network.neutron [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.969251] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521be379-a483-68ae-5edc-81db912c2c6e, 'name': SearchDatastore_Task, 'duration_secs': 0.017165} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.972388] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.972531] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.972769] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.972910] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.974226] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 
tempest-ServerPasswordTestJSON-1245861442-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.975699] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.975699] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 657.975699] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f37e38e-228b-439b-a7ca-3d36e57b5987 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.977226] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc157930-83e2-43df-89ed-e6f5861c8092 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.984185] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 657.984185] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5282a146-98ce-5863-5455-8152d7c4889a" [ 657.984185] env[68279]: _type = "Task" [ 657.984185] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.991638] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.992211] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.994727] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26d56a39-2d56-40c6-a8f1-7ca68b0d7bda {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.002519] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5282a146-98ce-5863-5455-8152d7c4889a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.007036] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 658.007036] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526f5c44-23a1-4791-2334-d77bcd960d65" [ 658.007036] env[68279]: _type = "Task" [ 658.007036] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.022471] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526f5c44-23a1-4791-2334-d77bcd960d65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.035723] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.035925] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.064483] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962557, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.116538] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049a9c5e-7bba-4e41-b038-91c67069cb67 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.124912] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106711f8-85d4-40b6-b06a-e14347af4473 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.160871] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de084d6-2666-4f7b-93e9-ef25a0a78f22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.171687] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609e0c20-6f60-43e2-8eec-b34a2f670fcf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.189873] env[68279]: DEBUG nova.compute.provider_tree [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.461238] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Successfully updated port: 7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.495342] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5282a146-98ce-5863-5455-8152d7c4889a, 'name': SearchDatastore_Task, 'duration_secs': 0.023671} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.495639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.495871] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.496109] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.519507] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526f5c44-23a1-4791-2334-d77bcd960d65, 'name': SearchDatastore_Task, 'duration_secs': 0.025717} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.520329] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1e29fb-65e9-40f5-b128-7518f1d223c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.526811] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 658.526811] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237f654-ffa3-f7a5-3235-c0e8cd373d8c" [ 658.526811] env[68279]: _type = "Task" [ 658.526811] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.538626] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237f654-ffa3-f7a5-3235-c0e8cd373d8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.566556] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962557, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.694786] env[68279]: DEBUG nova.scheduler.client.report [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.744611] env[68279]: DEBUG nova.network.neutron [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.861536] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.861815] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.862010] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] 
Acquiring lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.862190] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.962561] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.962721] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.962878] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.041204] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237f654-ffa3-f7a5-3235-c0e8cd373d8c, 'name': SearchDatastore_Task, 'duration_secs': 0.02547} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.041204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.041204] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4/9d4b56df-11d9-4d94-94f3-6c5e27ea85f4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.041204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.041204] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.041204] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fa96bfb-a071-4fa3-9cd0-ca322121b3ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.041991] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4ffd20a-49a9-4791-a23e-28cc0b18e2a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.053466] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 659.053466] env[68279]: value = "task-2962558" [ 659.053466] env[68279]: _type = "Task" [ 659.053466] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.054739] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.054909] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.061721] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f434a95-3d01-4558-8ffa-4f33aa9f0312 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.070867] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 659.070867] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527dbdd1-d904-e9df-8e9e-9ffc09505d8f" [ 659.070867] env[68279]: _type = "Task" [ 659.070867] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.077530] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962557, 'name': CreateSnapshot_Task, 'duration_secs': 1.058302} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.077530] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.081048] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 659.081838] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77240c08-4e4a-47f7-9acb-c74fdf4e4ac1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.095454] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527dbdd1-d904-e9df-8e9e-9ffc09505d8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010963} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.096416] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b06d8a6-9234-4999-b5e3-d761029d3a9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.103185] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 659.103185] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895cbd-3947-54af-469e-23a0b045e2e3" [ 659.103185] env[68279]: _type = "Task" [ 659.103185] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.112862] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895cbd-3947-54af-469e-23a0b045e2e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.201386] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.201386] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 659.204058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.151s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.205693] env[68279]: INFO nova.compute.claims [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.210613] env[68279]: DEBUG nova.compute.manager [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Received event network-vif-plugged-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.210885] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Acquiring lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.211408] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.211631] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.211811] env[68279]: DEBUG nova.compute.manager [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] No waiting events found dispatching network-vif-plugged-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 659.211974] env[68279]: WARNING nova.compute.manager [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Received unexpected event network-vif-plugged-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd for instance with vm_state building and task_state spawning. [ 659.212142] env[68279]: DEBUG nova.compute.manager [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Received event network-changed-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.212299] env[68279]: DEBUG nova.compute.manager [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Refreshing instance network info cache due to event network-changed-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 659.212496] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Acquiring lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.212629] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Acquired lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.213927] env[68279]: DEBUG nova.network.neutron [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Refreshing network info cache for port 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.246860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.509889] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.569649] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962558, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.606551] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 659.606551] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2a7b988f-50be-44ce-8be8-10ed152367ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.625141] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895cbd-3947-54af-469e-23a0b045e2e3, 'name': SearchDatastore_Task, 'duration_secs': 0.011189} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.626750] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.627042] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 336b7399-b64e-411f-99bc-ba0d292e371a/336b7399-b64e-411f-99bc-ba0d292e371a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.627466] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 659.627466] env[68279]: value = "task-2962559" [ 659.627466] env[68279]: _type = "Task" [ 659.627466] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.627867] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd5d3c68-9256-434a-bdb1-d03360823741 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.640148] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 659.640148] env[68279]: value = "task-2962560" [ 659.640148] env[68279]: _type = "Task" [ 659.640148] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.644036] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.656592] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.710373] env[68279]: DEBUG nova.compute.utils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.712472] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 659.712622] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.814942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.814942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.832228] env[68279]: DEBUG nova.policy [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48f362ecfd4840ecad589d3e3a373355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90ec938e6c384c4f96779e4a0e091a80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.885747] env[68279]: DEBUG nova.network.neutron [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [{"id": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "address": "fa:16:3e:f1:ad:db", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcc683d-e5", "ovs_interfaceid": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.969633] env[68279]: DEBUG nova.network.neutron [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updated VIF entry in instance network info cache for port 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.969978] env[68279]: DEBUG nova.network.neutron [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updating instance_info_cache with network_info: [{"id": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "address": "fa:16:3e:e1:8e:07", "network": {"id": "53711808-af51-416e-91d0-d0d066b1befb", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-761926569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "486f9055435d482aa2ebcdf6fe29b948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb2c9fd-3c", "ovs_interfaceid": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.007560] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Received event network-changed-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 660.007699] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Refreshing instance network info cache due to event network-changed-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 660.007907] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Acquiring lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.008064] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Acquired lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.008320] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Refreshing network info cache for port 4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.066521] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730853} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.066906] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4/9d4b56df-11d9-4d94-94f3-6c5e27ea85f4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.067119] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.067375] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1627349b-5c05-4a31-85fa-9b9584128ff5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.074953] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 660.074953] env[68279]: value = "task-2962561" [ 660.074953] env[68279]: _type = "Task" [ 660.074953] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.084628] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962561, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.142386] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.154036] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962560, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.216071] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 660.389120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.389704] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Instance network_info: |[{"id": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "address": "fa:16:3e:f1:ad:db", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcc683d-e5", "ovs_interfaceid": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 660.390408] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:ad:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dcc683d-e5ad-49a8-8e28-a1af77590026', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.405464] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating folder: Project (bd1384256d224e80bf6f25b9fd054376). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.406903] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Successfully created port: ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.413922] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-488bf3e0-bd31-4535-82c5-019067ed0e6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.445425] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created folder: Project (bd1384256d224e80bf6f25b9fd054376) in parent group-v594445. [ 660.445728] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating folder: Instances. Parent ref: group-v594501. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.446041] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-836a3ef5-ccec-4270-8212-83a0044a9f2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.458271] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created folder: Instances in parent group-v594501. [ 660.459799] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.460078] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 660.460305] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e608afe3-5039-43b0-ada8-0b01e3202dc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.481479] env[68279]: DEBUG oslo_concurrency.lockutils [req-bee3ba36-071f-4dc6-ac3d-d2ceb44a820f req-d58c1c96-32d2-4d0f-9e84-ed08c0e1d9fd service nova] Releasing lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.488125] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.488125] env[68279]: value = "task-2962564" [ 660.488125] env[68279]: _type = "Task" [ 660.488125] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.499223] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962564, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.515860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.515911] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.516177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.516464] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.516535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.519595] env[68279]: INFO nova.compute.manager [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Terminating instance [ 660.591289] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174993} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.592028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.593234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0724ee3d-e8c9-4bf1-96cc-b56a380ff8fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.625837] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4/9d4b56df-11d9-4d94-94f3-6c5e27ea85f4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.630666] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9d7a851-a9e5-439e-95da-1962e6ebb632 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.659350] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.856879} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.665992] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 336b7399-b64e-411f-99bc-ba0d292e371a/336b7399-b64e-411f-99bc-ba0d292e371a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.666306] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.666589] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.666860] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 660.666860] env[68279]: value = "task-2962565" [ 660.666860] env[68279]: _type = "Task" [ 660.666860] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.667257] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-073fb5d6-606a-45af-98ee-0336f7cf692c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.678277] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962565, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.684788] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 660.684788] env[68279]: value = "task-2962566" [ 660.684788] env[68279]: _type = "Task" [ 660.684788] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.695295] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962566, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.767836] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77719e0-8633-475b-9bf8-186814870b46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.786766] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 660.839572] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Updated VIF entry in instance network info cache for port 4fbdad72-dc39-4f81-80af-7eb2c9db8d4e. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.839946] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Updating instance_info_cache with network_info: [{"id": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "address": "fa:16:3e:a0:27:90", "network": {"id": "b6b288a3-f382-44fb-b835-e8cf4310b1d9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1046474361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab096093920e44a5b89eac4266233a62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fbdad72-dc", "ovs_interfaceid": "4fbdad72-dc39-4f81-80af-7eb2c9db8d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.877498] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91439740-2b1a-4ce4-a94c-74a4e444b2c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.885753] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fc9383-3c61-43ca-8e87-2f0dcd7fe23e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.917839] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e235c23a-f76c-4d79-afbe-90b5acd4cea8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.926166] env[68279]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5378e6-c1fe-468c-8e2a-c7741cbaa862 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.941862] env[68279]: DEBUG nova.compute.provider_tree [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.000866] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962564, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.027644] env[68279]: DEBUG nova.compute.manager [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 661.027862] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.028799] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731435ce-9742-4c3a-a324-799dc9e49d91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.037841] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.037841] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2956c67e-a865-44f8-ad40-e81c2e7accf6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.045950] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 661.045950] env[68279]: value = "task-2962567" [ 661.045950] env[68279]: _type = "Task" [ 661.045950] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.054952] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.155654] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.179089] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962565, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.193884] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962566, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092702} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.194169] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.195013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2c0715-a289-4078-9e66-8526190762dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.217175] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 336b7399-b64e-411f-99bc-ba0d292e371a/336b7399-b64e-411f-99bc-ba0d292e371a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.217906] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f86c671a-595c-4ab7-9125-649f318f43e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.233021] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 661.242736] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 661.242736] env[68279]: value = "task-2962568" [ 661.242736] env[68279]: _type = "Task" [ 661.242736] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.256738] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962568, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.259340] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 661.259573] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.259707] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 661.259880] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 661.260032] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 661.260182] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 661.260382] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 661.260534] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
661.260690] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 661.260843] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 661.261013] env[68279]: DEBUG nova.virt.hardware [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 661.261822] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c3cf85-c006-40ef-bd57-479c9c5afcaf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.272401] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd344ba8-02c5-4095-a44f-857039d32c2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.294812] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.294812] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64f3ce42-3523-4093-b1ba-88271b2b6cd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.301976] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 661.301976] env[68279]: value = "task-2962569" [ 661.301976] env[68279]: _type = "Task" [ 661.301976] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.312243] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962569, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.343255] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Releasing lock "refresh_cache-9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.343570] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received event network-vif-plugged-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 661.344019] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Acquiring lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.344207] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.344250] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.344425] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] No waiting events found dispatching network-vif-plugged-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 661.344619] env[68279]: WARNING nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received unexpected event network-vif-plugged-7dcc683d-e5ad-49a8-8e28-a1af77590026 for instance with vm_state building and task_state spawning. [ 661.344876] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 661.345039] env[68279]: DEBUG nova.compute.manager [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing instance network info cache due to event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 661.345356] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Acquiring lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.345356] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Acquired lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.345573] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.444839] env[68279]: DEBUG nova.scheduler.client.report [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.503588] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962564, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.559531] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962567, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.658026] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.679740] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962565, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.753460] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962568, 'name': ReconfigVM_Task, 'duration_secs': 0.318403} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.753847] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 336b7399-b64e-411f-99bc-ba0d292e371a/336b7399-b64e-411f-99bc-ba0d292e371a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.754506] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e63155a-82fb-47a7-8c6a-4d65360095af {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.762320] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 661.762320] env[68279]: value = "task-2962570" [ 661.762320] env[68279]: _type = "Task" [ 661.762320] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.773065] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962570, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.812888] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962569, 'name': PowerOffVM_Task, 'duration_secs': 0.371991} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.812888] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.812888] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 661.950665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.951192] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 661.954144] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.790s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.955591] env[68279]: INFO nova.compute.claims [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.001673] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962564, 'name': CreateVM_Task, 'duration_secs': 1.15096} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.002361] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 662.003014] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.003193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.003509] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 662.003967] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65fe9b3a-d53b-4842-a5df-4175e9a7c9cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.009027] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 662.009027] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2f50-2574-6996-2263-f5acd4f16b06" [ 662.009027] env[68279]: _type = "Task" [ 662.009027] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.018911] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2f50-2574-6996-2263-f5acd4f16b06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.062494] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962567, 'name': PowerOffVM_Task, 'duration_secs': 0.612334} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.062764] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.063183] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 662.063442] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1a344e5-d740-4e43-b899-d224a35335f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.069029] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updated VIF entry in instance network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 662.069391] env[68279]: DEBUG nova.network.neutron [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [{"id": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "address": "fa:16:3e:f1:ad:db", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcc683d-e5", "ovs_interfaceid": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.159155] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962559, 'name': CloneVM_Task, 'duration_secs': 2.227468} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.159658] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Created linked-clone VM from snapshot [ 662.160531] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac440605-6f68-4857-9c84-cf39a85ce872 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.169277] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Uploading image 90945600-cd0e-4e27-b4d0-bd15db2c3655 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 662.181259] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962565, 'name': ReconfigVM_Task, 'duration_secs': 1.025372} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.182214] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4/9d4b56df-11d9-4d94-94f3-6c5e27ea85f4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.183160] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1803547-6601-4211-b4f7-e64fd22b8a52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.193021] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 662.193021] env[68279]: value = "task-2962572" [ 662.193021] env[68279]: _type = "Task" [ 662.193021] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.193021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 662.193021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 662.193021] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleting the datastore file [datastore1] b40956fc-66f5-4bb6-8763-22465bb221bf {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 662.198846] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bf8d42b-8062-4c1b-8d32-6f2d1bfff44d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.210941] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962572, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.213503] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 662.213503] env[68279]: value = "vm-594500" [ 662.213503] env[68279]: _type = "VirtualMachine" [ 662.213503] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 662.216075] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c8d52d5b-3429-45ec-97ce-d48bbb1f3671 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.218085] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 662.218085] env[68279]: value = "task-2962573" [ 662.218085] env[68279]: _type = "Task" [ 662.218085] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.225687] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lease: (returnval){ [ 662.225687] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f34d1f-a039-9d41-01b9-cbb6ae4bf867" [ 662.225687] env[68279]: _type = "HttpNfcLease" [ 662.225687] env[68279]: } obtained for exporting VM: (result){ [ 662.225687] env[68279]: value = "vm-594500" [ 662.225687] env[68279]: _type = "VirtualMachine" [ 662.225687] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 662.226427] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the lease: (returnval){ [ 662.226427] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f34d1f-a039-9d41-01b9-cbb6ae4bf867" [ 662.226427] env[68279]: _type = "HttpNfcLease" [ 662.226427] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 662.230905] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962573, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.237930] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 662.237930] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f34d1f-a039-9d41-01b9-cbb6ae4bf867" [ 662.237930] env[68279]: _type = "HttpNfcLease" [ 662.237930] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 662.273611] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962570, 'name': Rename_Task, 'duration_secs': 0.143477} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.274068] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.274427] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7d68b28-27ca-4fd0-91e9-9eb05cb00bbb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.283240] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 662.283240] env[68279]: value = "task-2962575" [ 662.283240] env[68279]: _type = "Task" [ 662.283240] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.291533] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962575, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.320021] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.320021] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.320021] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.320021] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.320516] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.322158] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.328115] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a34778e-1f60-42c3-b38d-98ea9e05526b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.345298] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 662.345298] env[68279]: value = "task-2962576" [ 662.345298] env[68279]: _type = "Task" [ 662.345298] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.358779] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962576, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.412884] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Successfully updated port: ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.465444] env[68279]: DEBUG nova.compute.utils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 662.469085] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 662.470059] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 662.521268] env[68279]: DEBUG nova.policy [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6dcff6c11546f9b0907917a2463755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbad607de614a809c51668c2ac0d012', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 662.524092] env[68279]: DEBUG nova.compute.manager [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Received event network-vif-plugged-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.524303] env[68279]: DEBUG oslo_concurrency.lockutils [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] Acquiring lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.524640] env[68279]: DEBUG oslo_concurrency.lockutils [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] Lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.525304] env[68279]: DEBUG oslo_concurrency.lockutils [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] Lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.525304] env[68279]: DEBUG nova.compute.manager [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] No waiting events found dispatching network-vif-plugged-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 662.525304] env[68279]: WARNING nova.compute.manager [req-d5f1f559-df86-4514-8992-7e0cc3b8a99b req-04804cf9-8dea-4ce2-a087-2f8bdee34c80 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Received unexpected event network-vif-plugged-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba for instance with vm_state building and task_state spawning. 
[ 662.533070] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2f50-2574-6996-2263-f5acd4f16b06, 'name': SearchDatastore_Task, 'duration_secs': 0.01044} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.533705] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.533705] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.533845] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.534025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.534228] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.534660] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bfe182c-34db-41da-9d30-e16d3a28b496 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.547132] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.547354] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.550821] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce7c4e3e-ec8d-4587-bb4c-fd435ab30e88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.562443] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 662.562443] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aac1e9-b45c-dc3f-d9ca-3d31b454bd8c" [ 662.562443] env[68279]: _type = "Task" [ 662.562443] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.571813] env[68279]: DEBUG oslo_concurrency.lockutils [req-12ec3d2e-fe05-442f-bdef-8864359f344f req-ce22cf36-088a-4bad-8eb6-4ee6de3bb56e service nova] Releasing lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.575868] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aac1e9-b45c-dc3f-d9ca-3d31b454bd8c, 'name': SearchDatastore_Task, 'duration_secs': 0.011971} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.576874] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25d17240-663d-4c16-9e32-273a4b2a22e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.582604] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 662.582604] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6caff-e2de-213a-d89a-3effd21e7f16" [ 662.582604] env[68279]: _type = "Task" [ 662.582604] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.592219] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6caff-e2de-213a-d89a-3effd21e7f16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.704772] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962572, 'name': Rename_Task, 'duration_secs': 0.170874} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.704772] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.705204] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6824c317-4649-432f-9a26-3e4a37496cce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.713972] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 662.713972] env[68279]: value = "task-2962577" [ 662.713972] env[68279]: _type = "Task" [ 662.713972] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.727351] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.738755] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 662.738755] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f34d1f-a039-9d41-01b9-cbb6ae4bf867" [ 662.738755] env[68279]: _type = "HttpNfcLease" [ 662.738755] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 662.742122] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 662.742122] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f34d1f-a039-9d41-01b9-cbb6ae4bf867" [ 662.742122] env[68279]: _type = "HttpNfcLease" [ 662.742122] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 662.742442] env[68279]: DEBUG oslo_vmware.api [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962573, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225394} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.743232] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb100ba-9e6c-4f5b-9477-54856d81488a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.745900] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.746080] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 662.746470] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.746608] env[68279]: INFO nova.compute.manager [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Took 1.72 seconds to destroy the instance on the hypervisor. [ 662.746853] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 662.747063] env[68279]: DEBUG nova.compute.manager [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 662.747158] env[68279]: DEBUG nova.network.neutron [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.757602] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 662.757805] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 662.834713] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962575, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.855793] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962576, 'name': ReconfigVM_Task, 'duration_secs': 0.223997} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.856108] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 662.877524] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-54df41af-aff2-43dc-823b-c02748d63250 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.924299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.924299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquired lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.924299] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.974433] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 663.083414] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Successfully created port: 103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.098064] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6caff-e2de-213a-d89a-3effd21e7f16, 'name': SearchDatastore_Task, 'duration_secs': 0.014527} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.098064] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.098340] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1/298d3bc2-1fad-481f-993b-8d0dc9ed1ed1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.098607] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8276510a-08bf-466e-a582-6c299f1a3cde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.114749] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 663.114749] env[68279]: value = "task-2962578" [ 663.114749] env[68279]: _type = "Task" [ 663.114749] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.125873] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.226057] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962577, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.339952] env[68279]: DEBUG oslo_vmware.api [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2962575, 'name': PowerOnVM_Task, 'duration_secs': 0.593273} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.340412] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.340684] env[68279]: INFO nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Took 9.91 seconds to spawn the instance on the hypervisor. [ 663.340918] env[68279]: DEBUG nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.341817] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9515c1d8-e068-414f-a615-dbbe97a59d48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.362529] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 663.362973] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.363281] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 663.363573] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 663.363819] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 663.364085] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 663.364411] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 663.364678] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 663.364949] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 663.365258] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 663.365544] env[68279]: DEBUG nova.virt.hardware [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 663.371635] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 663.377228] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af85591-f141-478f-9d1e-485beb65b754 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.399153] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 663.399153] env[68279]: value = "task-2962579" [ 663.399153] env[68279]: _type = "Task" [ 663.399153] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.411478] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.522060] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.556362] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa830896-c68d-4a85-9126-a565516f69fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.574906] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbb4b37-818b-41d9-b094-e8027082378f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.620145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6c3e6e-7fee-4821-a585-157b2f9d11be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.631126] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962578, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.639254] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76c2476-814f-4d4e-b46c-0f0c4489a003 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.659446] env[68279]: DEBUG nova.compute.provider_tree [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.729059] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962577, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.756441] env[68279]: DEBUG nova.compute.manager [req-ff9b2cc7-f010-4720-a61b-996d59601e48 req-ec50d452-c6e6-4595-a754-6ff326af5407 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-deleted-0622e137-f260-4168-8beb-bd4491038e88 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 663.756647] env[68279]: INFO nova.compute.manager [req-ff9b2cc7-f010-4720-a61b-996d59601e48 req-ec50d452-c6e6-4595-a754-6ff326af5407 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Neutron deleted interface 0622e137-f260-4168-8beb-bd4491038e88; detaching it from the instance and deleting it from the info cache [ 663.756928] env[68279]: DEBUG nova.network.neutron [req-ff9b2cc7-f010-4720-a61b-996d59601e48 req-ec50d452-c6e6-4595-a754-6ff326af5407 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [{"id": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "address": "fa:16:3e:07:16:ac", "network": {"id": "f142e087-b46c-4e41-a7b8-de950b518d41", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755266048", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fb9edf-55", "ovs_interfaceid": "98fb9edf-552e-4a39-8b5b-4b81ddaf69b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "address": "fa:16:3e:7b:7d:a5", "network": {"id": "787a4f45-abc2-4348-9530-d63c0cc751d4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-465018129", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a927c2-f8", "ovs_interfaceid": "83a927c2-f8a2-4b72-a78e-a206cc03e8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.792900] env[68279]: DEBUG nova.network.neutron [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 
tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Updating instance_info_cache with network_info: [{"id": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "address": "fa:16:3e:6e:34:b3", "network": {"id": "b8959108-36cb-41e4-804f-4e86d1d56368", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1877664351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "90ec938e6c384c4f96779e4a0e091a80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee8f22d1-04", "ovs_interfaceid": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.875654] env[68279]: INFO nova.compute.manager [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Took 29.09 seconds to build instance. [ 663.911234] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962579, 'name': ReconfigVM_Task, 'duration_secs': 0.299245} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.911565] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 663.912626] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793037e6-c9e8-48d9-934d-89d51cc3c211 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.939847] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 663.940848] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ae09414-5cef-480d-a563-4a9597f3f9bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.962074] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 663.962074] env[68279]: value = "task-2962580" [ 663.962074] env[68279]: _type = "Task" [ 663.962074] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.971723] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962580, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.981188] env[68279]: DEBUG nova.network.neutron [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.986160] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 664.014098] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 664.014363] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.014746] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 664.014946] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.015110] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 664.015343] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 664.015691] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 664.015903] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 664.016249] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Got 1 possible 
topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 664.016538] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 664.016634] env[68279]: DEBUG nova.virt.hardware [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 664.018304] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b534f55f-76b2-4a6d-a684-9aa482275f61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.028503] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0598591-003a-4ef6-905c-411c8cb2334e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.129812] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671082} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.130383] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1/298d3bc2-1fad-481f-993b-8d0dc9ed1ed1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.130959] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.132941] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6548de6b-0303-4681-8e99-c760ce6269df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.144365] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 664.144365] env[68279]: value = "task-2962581" [ 664.144365] env[68279]: _type = "Task" [ 664.144365] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.154878] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.162208] env[68279]: DEBUG nova.scheduler.client.report [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.227321] env[68279]: DEBUG oslo_vmware.api [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962577, 'name': PowerOnVM_Task, 'duration_secs': 1.438104} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.227703] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.228025] env[68279]: INFO nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Took 13.41 seconds to spawn the instance on the hypervisor. 
[ 664.228334] env[68279]: DEBUG nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.229253] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c348c2-8458-4074-ad24-ee0984bfd64a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.260788] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e2e75e8-9505-442c-b1dc-83dcc7b0feb4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.275663] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76353ac7-dcfc-48a7-a381-0e98bb0855dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.298265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Releasing lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.298265] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Instance network_info: |[{"id": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "address": "fa:16:3e:6e:34:b3", "network": {"id": "b8959108-36cb-41e4-804f-4e86d1d56368", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1877664351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "90ec938e6c384c4f96779e4a0e091a80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee8f22d1-04", "ovs_interfaceid": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.298265] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:34:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ee8f22d1-04e2-4f48-a66c-35de46d6f8ba', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.309301] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Creating folder: Project (90ec938e6c384c4f96779e4a0e091a80). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.323592] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9bf2a09-dbaa-4837-8369-18b621ef5ec1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.325731] env[68279]: DEBUG nova.compute.manager [req-ff9b2cc7-f010-4720-a61b-996d59601e48 req-ec50d452-c6e6-4595-a754-6ff326af5407 service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Detach interface failed, port_id=0622e137-f260-4168-8beb-bd4491038e88, reason: Instance b40956fc-66f5-4bb6-8763-22465bb221bf could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 664.336854] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Created folder: Project (90ec938e6c384c4f96779e4a0e091a80) in parent group-v594445. [ 664.337159] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Creating folder: Instances. Parent ref: group-v594504. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.337506] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6415b696-1360-487e-97ea-c5e18c11bb00 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.349565] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Created folder: Instances in parent group-v594504. [ 664.349810] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.349998] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.350225] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35bfbbfa-f8f1-487b-8fec-dfd510d820e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.373834] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.373834] env[68279]: value = "task-2962584" [ 664.373834] env[68279]: _type = "Task" [ 664.373834] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.377789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2db60c3-d56d-4f42-b035-9cea170d91c7 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.305s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.389864] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962584, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.473167] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.483948] env[68279]: INFO nova.compute.manager [-] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Took 1.74 seconds to deallocate network for instance. [ 664.656348] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14766} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.656748] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.657717] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e8bb46-8594-4554-b0f8-214b6f8e6475 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.681032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.682212] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 664.695895] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1/298d3bc2-1fad-481f-993b-8d0dc9ed1ed1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.696292] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.422s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.697925] env[68279]: INFO nova.compute.claims [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.702037] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f3823d2-714e-4319-8d25-bb8e30499306 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.730188] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 664.730188] env[68279]: value = "task-2962585" [ 664.730188] env[68279]: _type = "Task" [ 664.730188] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.754460] env[68279]: INFO nova.compute.manager [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Took 30.00 seconds to build instance. [ 664.759025] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962585, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.881247] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.891141] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962584, 'name': CreateVM_Task, 'duration_secs': 0.458882} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.891141] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 664.891141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.891141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.891141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 664.891141] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d868c5c7-2ba1-4cd0-88ee-c2b447329ccd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.899804] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 664.899804] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e5c4-0efd-91c5-7dbd-c732bdc68194" [ 664.899804] env[68279]: _type = "Task" [ 664.899804] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.909500] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e5c4-0efd-91c5-7dbd-c732bdc68194, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.974813] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962580, 'name': ReconfigVM_Task, 'duration_secs': 0.581938} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.975746] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1/6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 664.975746] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 664.992433] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.108403] env[68279]: DEBUG nova.compute.manager [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Received event network-changed-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.108600] env[68279]: DEBUG nova.compute.manager [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Refreshing instance network info cache due to event network-changed-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 665.108807] env[68279]: DEBUG oslo_concurrency.lockutils [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] Acquiring lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.108944] env[68279]: DEBUG oslo_concurrency.lockutils [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] Acquired lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.110190] env[68279]: DEBUG nova.network.neutron [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Refreshing network info cache for port ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.197746] env[68279]: DEBUG nova.compute.utils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.199439] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.199607] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.241337] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962585, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.257852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b74f6185-5144-4f9a-a91e-a46600f5ba26 tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 37.518s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.416314] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e5c4-0efd-91c5-7dbd-c732bdc68194, 'name': SearchDatastore_Task, 'duration_secs': 0.013355} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.417350] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.417894] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.418128] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.418466] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.418608] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.418782] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.419142] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a4bf239-fcdb-4e1d-9ea5-1793f2c3408a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.430172] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.430385] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.431166] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c6ccf44-b734-4920-be89-db6cb5f4f8f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.437481] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 665.437481] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1052d-eb33-4752-41d0-c2cbc33ae5ec" [ 665.437481] env[68279]: _type = "Task" [ 665.437481] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.447877] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1052d-eb33-4752-41d0-c2cbc33ae5ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.483678] env[68279]: DEBUG nova.policy [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d12c43445684388939cc7ff0910c462', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aef4bec1d68418390ba922a536e5712', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 665.486128] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec22038b-9c10-45a7-a872-11c7b83b43f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.516705] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Successfully updated port: 103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.519050] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38200deb-d8f6-4f57-8f1f-adf68682417b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.543631] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 665.704812] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 
tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 665.745107] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962585, 'name': ReconfigVM_Task, 'duration_secs': 0.54229} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.746586] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1/298d3bc2-1fad-481f-993b-8d0dc9ed1ed1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.747464] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10f75f64-96e0-4891-afba-ca49f33e1c1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.766191] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 665.770736] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 665.770736] env[68279]: value = "task-2962586" [ 665.770736] env[68279]: _type = "Task" [ 665.770736] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.774307] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "4e157792-f910-492c-ab29-dd3f86cb96a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.774594] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.785812] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962586, 'name': Rename_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.960948] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1052d-eb33-4752-41d0-c2cbc33ae5ec, 'name': SearchDatastore_Task, 'duration_secs': 0.013411} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.962763] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-108e6ec6-8e34-40f1-9d76-5a7826e6dab0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.972178] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 665.972178] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522c7a84-9e87-631e-b4fe-3ab8160731e7" [ 665.972178] env[68279]: _type = "Task" [ 665.972178] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.988780] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522c7a84-9e87-631e-b4fe-3ab8160731e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.024813] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.025198] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.025474] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.107103] env[68279]: DEBUG nova.network.neutron [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Port 16b424ba-6749-431c-bdc5-22c910ad0fe6 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 666.166949] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.168047] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.291841] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962586, 'name': Rename_Task, 'duration_secs': 0.231971} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.292470] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.292729] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c7a3875-d486-4735-8c78-028ab8453972 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.298182] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.301626] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 666.301626] env[68279]: value = "task-2962587" [ 666.301626] env[68279]: _type = "Task" [ 666.301626] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.314546] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.340226] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfedea3-3033-4134-a61d-6f3dbed55877 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.349351] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a43be66-d7f5-42a3-a622-b3003110e483 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.381931] env[68279]: DEBUG nova.network.neutron [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Updated VIF entry in instance network info cache for port ee8f22d1-04e2-4f48-a66c-35de46d6f8ba. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 666.382307] env[68279]: DEBUG nova.network.neutron [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Updating instance_info_cache with network_info: [{"id": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "address": "fa:16:3e:6e:34:b3", "network": {"id": "b8959108-36cb-41e4-804f-4e86d1d56368", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1877664351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "90ec938e6c384c4f96779e4a0e091a80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee8f22d1-04", "ovs_interfaceid": "ee8f22d1-04e2-4f48-a66c-35de46d6f8ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.386975] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549e2863-c84c-4062-8941-aa2fecb7a13c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.394332] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e45c4e-f5ba-4b39-8c63-a8b351af1fe3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.411774] env[68279]: DEBUG nova.compute.provider_tree [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.490505] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522c7a84-9e87-631e-b4fe-3ab8160731e7, 'name': SearchDatastore_Task, 'duration_secs': 0.014576} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.490795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.491062] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] deea2dea-1860-45a0-9637-ced09bb51b81/deea2dea-1860-45a0-9637-ced09bb51b81.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.491341] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37749afd-9cd1-4f57-953f-08425093d84f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.501461] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 666.501461] env[68279]: value = "task-2962588" [ 666.501461] env[68279]: _type = "Task" [ 666.501461] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.513553] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.526079] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Successfully created port: 5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.617718] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.678346] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.678919] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.678919] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.679049] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.679585] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.679585] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.679585] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 666.679737] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.716288] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.755104] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 666.755842] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.755842] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.755842] env[68279]: DEBUG nova.virt.hardware [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.756587] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02df8cb6-03fb-4a18-b8ec-5ac4ab1370fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.768243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737bd6eb-96f3-4715-bb33-a06aa32b7016 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.814440] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962587, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.887848] env[68279]: DEBUG oslo_concurrency.lockutils [req-6e4c8208-ef85-47fb-ba57-d2e88a235124 req-faa93ee1-cf30-40ef-8b9e-7f61cc0ade53 service nova] Releasing lock "refresh_cache-deea2dea-1860-45a0-9637-ced09bb51b81" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.898573] env[68279]: DEBUG nova.compute.manager [req-b2a378dc-1001-4c44-a604-2dbaf53107f6 req-dd3bf4f2-7544-4953-83d4-78d294e2fcba service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-deleted-83a927c2-f8a2-4b72-a78e-a206cc03e8d8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.898573] env[68279]: DEBUG nova.compute.manager [req-b2a378dc-1001-4c44-a604-2dbaf53107f6 req-dd3bf4f2-7544-4953-83d4-78d294e2fcba service nova] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Received event network-vif-deleted-98fb9edf-552e-4a39-8b5b-4b81ddaf69b6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.916181] env[68279]: DEBUG nova.network.neutron [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Updating instance_info_cache with network_info: [{"id": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "address": "fa:16:3e:e0:17:60", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap103f3058-f9", "ovs_interfaceid": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.916879] env[68279]: DEBUG nova.scheduler.client.report [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.017807] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 
tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962588, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.135840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.135840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.135840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.182857] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.317292] env[68279]: DEBUG oslo_vmware.api [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2962587, 'name': PowerOnVM_Task, 'duration_secs': 0.826854} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.317292] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.317292] env[68279]: INFO nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Took 11.30 seconds to spawn the instance on the hypervisor. 
[ 667.317292] env[68279]: DEBUG nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.317292] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3196cb31-e549-4701-9f93-afc653648480 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.424085] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.424450] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Instance network_info: |[{"id": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "address": "fa:16:3e:e0:17:60", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap103f3058-f9", "ovs_interfaceid": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.425204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.425675] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.428504] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:17:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '103f3058-f969-4e2c-bb38-2c0fa06ba731', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.437737] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating folder: Project (9dbad607de614a809c51668c2ac0d012). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.437737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.514s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.437737] env[68279]: DEBUG nova.objects.instance [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lazy-loading 'resources' on Instance uuid c9bda338-6c7d-4850-8f46-7cd916372ac9 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.439456] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8f22291-494e-4713-9488-e9c85f5f6f3b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.455354] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created folder: Project (9dbad607de614a809c51668c2ac0d012) in parent group-v594445. [ 667.455588] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating folder: Instances. Parent ref: group-v594507. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.456158] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6c194c5-ce46-41c5-b45d-17016cca752c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.468862] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created folder: Instances in parent group-v594507. [ 667.469126] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.469369] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.470049] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-955a77c0-6feb-4f6c-8055-227e615f2519 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.491545] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.491545] env[68279]: value = "task-2962591" [ 667.491545] env[68279]: _type = "Task" [ 667.491545] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.501229] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962591, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.511921] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572608} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.511921] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] deea2dea-1860-45a0-9637-ced09bb51b81/deea2dea-1860-45a0-9637-ced09bb51b81.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.512125] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.512385] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dba48f63-e113-4214-a96e-0eb17b4af883 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.520584] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 667.520584] env[68279]: value = "task-2962592" [ 667.520584] env[68279]: _type = "Task" [ 667.520584] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.531703] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962592, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.615677] env[68279]: DEBUG nova.compute.manager [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Received event network-vif-plugged-103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.616510] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Acquiring lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.616815] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.617085] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.617254] env[68279]: DEBUG nova.compute.manager [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] No waiting events found dispatching network-vif-plugged-103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 667.617434] env[68279]: WARNING nova.compute.manager [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Received unexpected event network-vif-plugged-103f3058-f969-4e2c-bb38-2c0fa06ba731 for instance with vm_state building and task_state spawning. [ 667.618153] env[68279]: DEBUG nova.compute.manager [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Received event network-changed-103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.618294] env[68279]: DEBUG nova.compute.manager [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Refreshing instance network info cache due to event network-changed-103f3058-f969-4e2c-bb38-2c0fa06ba731. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 667.619066] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Acquiring lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.619066] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Acquired lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.619066] env[68279]: DEBUG nova.network.neutron [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Refreshing network info cache for port 103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.840266] env[68279]: INFO nova.compute.manager [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Took 29.20 seconds to build instance. [ 667.944026] env[68279]: DEBUG nova.compute.utils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 667.946749] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 667.947257] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.008196] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962591, 'name': CreateVM_Task, 'duration_secs': 0.488034} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.008903] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.010507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.010671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.011014] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.011294] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab447bd5-dae2-4567-9a6c-6a9f6fe49ad0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.020904] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 668.020904] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de11d8-7f41-9119-4929-cc4c75f569e5" [ 668.020904] env[68279]: _type = "Task" [ 668.020904] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.043082] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de11d8-7f41-9119-4929-cc4c75f569e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.043082] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962592, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103289} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.044451] env[68279]: DEBUG nova.policy [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49483f51e2634b0385fb11abca58ade1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34fd2747aeac4bcd9dd18075cf4ebd8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.046174] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.047154] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7c5f84-bf00-4251-a7de-4896d4e58e85 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.074638] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] deea2dea-1860-45a0-9637-ced09bb51b81/deea2dea-1860-45a0-9637-ced09bb51b81.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.081049] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dfa27f9-a887-4b35-aed4-8fc21656f5a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.104404] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 668.104404] env[68279]: value = "task-2962593" [ 668.104404] env[68279]: _type = "Task" [ 668.104404] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.119184] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962593, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.220356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.222024] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.222024] env[68279]: DEBUG nova.network.neutron [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.327187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.327421] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.328239] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.328482] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.331032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.331032] env[68279]: INFO 
nova.compute.manager [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Terminating instance [ 668.344563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ae8330-4874-4b61-bcd6-82b834c701d9 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.730s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.450742] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.492153] env[68279]: DEBUG nova.network.neutron [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Updated VIF entry in instance network info cache for port 103f3058-f969-4e2c-bb38-2c0fa06ba731. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.492805] env[68279]: DEBUG nova.network.neutron [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Updating instance_info_cache with network_info: [{"id": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "address": "fa:16:3e:e0:17:60", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap103f3058-f9", "ovs_interfaceid": "103f3058-f969-4e2c-bb38-2c0fa06ba731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.541461] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de11d8-7f41-9119-4929-cc4c75f569e5, 'name': SearchDatastore_Task, 'duration_secs': 0.016879} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.545534] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.545634] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.545854] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.546009] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.546194] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 668.548242] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2bcba51-d973-4f5f-9d25-f4b197e7f950 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.558581] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.559027] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.559516] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0571fd51-5c9e-442a-ba78-e090bdf40130 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.571528] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 668.571528] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0551d-760d-4c11-a934-1887c1077eeb" [ 668.571528] env[68279]: _type = "Task" [ 668.571528] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.585322] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0551d-760d-4c11-a934-1887c1077eeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.616570] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962593, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.618593] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3564f8-fe74-4356-b3c0-b0d3c60c8e21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.627616] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcd5b90-5265-4e0f-9eec-599486c05449 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.662470] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf64963-aa92-4d3c-995b-b2173e53fb62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.673041] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da206dd-b5e3-4089-b78b-0848993404fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.687335] env[68279]: DEBUG nova.compute.provider_tree [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.835351] env[68279]: DEBUG nova.compute.manager [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.835542] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.836532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b161c706-7175-46c2-ac15-2aa311351350 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.845463] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.845806] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e5c4128-1578-45c6-8533-044f69c09e9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.851142] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.855349] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 668.855349] env[68279]: value = "task-2962594" [ 668.855349] env[68279]: _type = "Task" [ 668.855349] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.867117] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962594, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.945515] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Successfully updated port: 5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.989260] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Successfully created port: 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.995515] env[68279]: DEBUG oslo_concurrency.lockutils [req-33e78538-2102-462a-a9c3-5375d5e6f6f8 req-8ad10e14-e150-434e-8129-e6bcd3c2348a service nova] Releasing lock "refresh_cache-92786813-f4ab-4ff7-8597-aa1aa90eeb01" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.008820] env[68279]: DEBUG nova.network.neutron [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.083360] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0551d-760d-4c11-a934-1887c1077eeb, 'name': SearchDatastore_Task, 'duration_secs': 0.015546} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.084384] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-119ec604-ae90-4172-917d-7f9d52856401 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.094037] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 669.094037] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214f981-1584-e12a-af93-2b342a6378f9" [ 669.094037] env[68279]: _type = "Task" [ 669.094037] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.103816] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214f981-1584-e12a-af93-2b342a6378f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.119093] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962593, 'name': ReconfigVM_Task, 'duration_secs': 0.682782} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.119379] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Reconfigured VM instance instance-00000014 to attach disk [datastore1] deea2dea-1860-45a0-9637-ced09bb51b81/deea2dea-1860-45a0-9637-ced09bb51b81.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.120070] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e9eae02-5142-4b21-b867-a20852c638f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.128738] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 669.128738] env[68279]: value = "task-2962595" [ 669.128738] env[68279]: _type = "Task" [ 669.128738] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.141148] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962595, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.191719] env[68279]: DEBUG nova.scheduler.client.report [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.367848] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962594, 'name': PowerOffVM_Task, 'duration_secs': 0.337573} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.370514] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.370708] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.371262] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d51ed8b-a444-4880-bfbf-6d15886db729 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.389925] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.444736] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.444883] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.444923] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f 
tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Deleting the datastore file [datastore1] 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.446034] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1516e31-d4d4-41fd-a873-ffc7c149770f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.453418] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for the task: (returnval){ [ 669.453418] env[68279]: value = "task-2962597" [ 669.453418] env[68279]: _type = "Task" [ 669.453418] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.458189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.458366] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.458599] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.468880] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.471423] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962597, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.495360] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.495832] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.495832] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.495966] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.496068] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.496339] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.496561] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.496711] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 669.496871] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.497122] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.497299] env[68279]: DEBUG nova.virt.hardware [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.498528] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384dfa29-cb71-45fc-96b9-8dde1efe939b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.507687] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9df691-7ebe-46a5-bd50-d59f64020f72 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.515130] env[68279]: DEBUG oslo_concurrency.lockutils [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.603753] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214f981-1584-e12a-af93-2b342a6378f9, 'name': SearchDatastore_Task, 'duration_secs': 0.015591} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.604043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.604492] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 92786813-f4ab-4ff7-8597-aa1aa90eeb01/92786813-f4ab-4ff7-8597-aa1aa90eeb01.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.604710] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f327112-0d06-478b-b2f3-588fc2ebfeae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.614779] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 669.614779] env[68279]: value = "task-2962598" [ 669.614779] env[68279]: _type = "Task" [ 669.614779] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.626602] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.643608] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962595, 'name': Rename_Task, 'duration_secs': 0.24781} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.644019] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.644354] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8af2007-4587-4c5a-80b2-152fdeca8b29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.652202] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 669.652202] env[68279]: value = "task-2962599" [ 669.652202] env[68279]: _type = "Task" [ 669.652202] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.655038] env[68279]: DEBUG nova.compute.manager [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Received event network-changed-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.655543] env[68279]: DEBUG nova.compute.manager [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Refreshing instance network info cache due to event network-changed-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.655543] env[68279]: DEBUG oslo_concurrency.lockutils [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] Acquiring lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.655626] env[68279]: DEBUG oslo_concurrency.lockutils [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] Acquired lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.655757] env[68279]: DEBUG nova.network.neutron [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Refreshing network info cache for port 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.667213] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962599, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.697731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.700299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.700s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.703072] env[68279]: INFO nova.compute.claims [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 669.739118] env[68279]: INFO nova.scheduler.client.report [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Deleted allocations for instance c9bda338-6c7d-4850-8f46-7cd916372ac9 [ 669.966378] env[68279]: DEBUG oslo_vmware.api [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Task: {'id': task-2962597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207471} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.966801] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.966801] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.966982] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.967161] env[68279]: INFO nova.compute.manager [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 669.967423] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.969054] env[68279]: DEBUG nova.compute.manager [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.969054] env[68279]: DEBUG nova.network.neutron [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.005696] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.043113] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be25d69-55f8-43f3-9217-e62da1e372d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.078704] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e1dbab-edae-4bd7-8dae-bbef1f217b04 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.090025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 670.127744] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962598, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.166781] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962599, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.241607] env[68279]: DEBUG nova.network.neutron [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Updating instance_info_cache with network_info: [{"id": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "address": "fa:16:3e:2f:fb:97", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5486afba-2c", "ovs_interfaceid": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.253619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-610b13e8-118f-49e8-84bf-183960a98e96 tempest-ImagesNegativeTestJSON-1590220102 tempest-ImagesNegativeTestJSON-1590220102-project-member] Lock "c9bda338-6c7d-4850-8f46-7cd916372ac9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.526s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.444218] env[68279]: DEBUG nova.network.neutron [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updated VIF entry in instance network info cache for port 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.444218] env[68279]: DEBUG nova.network.neutron [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updating instance_info_cache with network_info: [{"id": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "address": "fa:16:3e:e1:8e:07", "network": {"id": "53711808-af51-416e-91d0-d0d066b1befb", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-761926569-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "486f9055435d482aa2ebcdf6fe29b948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb2c9fd-3c", "ovs_interfaceid": "7bb2c9fd-3cb4-4813-8661-d5baac85c2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.596221] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.596527] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d60ffb7-2bd6-453c-bbd7-9085e8909909 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.608491] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 670.608491] env[68279]: value = "task-2962600" [ 670.608491] env[68279]: _type = "Task" [ 670.608491] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.619526] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.628873] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962598, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.666300] env[68279]: DEBUG oslo_vmware.api [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962599, 'name': PowerOnVM_Task, 'duration_secs': 0.698311} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.667083] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.667083] env[68279]: INFO nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Took 9.43 seconds to spawn the instance on the hypervisor. [ 670.667228] env[68279]: DEBUG nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.668039] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fb56e3-7d90-4dbe-9e63-65232f03f999 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.749091] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.753551] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Instance network_info: |[{"id": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "address": "fa:16:3e:2f:fb:97", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5486afba-2c", "ovs_interfaceid": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 670.753551] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:fb:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5486afba-2c1e-409f-8d28-00f3c857d1a3', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.769556] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.773061] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.773669] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d0f4199-b294-464c-8fee-2bf2ba3ded4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.799287] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.799287] env[68279]: value = "task-2962601" [ 670.799287] env[68279]: _type = "Task" [ 670.799287] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.813570] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962601, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.877305] env[68279]: DEBUG nova.compute.manager [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Received event network-vif-plugged-5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.877305] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Acquiring lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.878144] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.878512] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.878809] env[68279]: DEBUG nova.compute.manager [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] No waiting events found dispatching network-vif-plugged-5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 670.879116] env[68279]: WARNING nova.compute.manager [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Received unexpected event network-vif-plugged-5486afba-2c1e-409f-8d28-00f3c857d1a3 for instance with vm_state building and task_state spawning. [ 670.879652] env[68279]: DEBUG nova.compute.manager [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Received event network-changed-5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.880144] env[68279]: DEBUG nova.compute.manager [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Refreshing instance network info cache due to event network-changed-5486afba-2c1e-409f-8d28-00f3c857d1a3. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 670.880473] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Acquiring lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.880780] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Acquired lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.881065] env[68279]: DEBUG nova.network.neutron [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Refreshing network info cache for port 5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.946328] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 670.946934] env[68279]: DEBUG oslo_concurrency.lockutils [req-3f161f35-f5bb-4a0b-b933-ea88cb25314e req-85f1b3b2-b970-4a04-99cb-62874339260e service nova] Releasing lock "refresh_cache-336b7399-b64e-411f-99bc-ba0d292e371a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.948335] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22781b3-86b7-4cba-96fc-5d25f154937e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.960614] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 670.960614] env[68279]: ERROR oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk due to incomplete transfer. [ 670.960851] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-63e92fda-4b63-4718-a130-8b22907e6f47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.974516] env[68279]: DEBUG oslo_vmware.rw_handles [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da37df-2c25-56ab-ed6e-47518b8f28ed/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 670.974833] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Uploaded image 90945600-cd0e-4e27-b4d0-bd15db2c3655 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 670.977050] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 670.977316] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7584e8d2-a127-43e1-9fde-46b895500fe9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.986269] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 670.986269] env[68279]: value = "task-2962602" [ 670.986269] env[68279]: _type = "Task" [ 670.986269] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.004125] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962602, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.123596] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962600, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.136186] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962598, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.192200] env[68279]: INFO nova.compute.manager [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Took 29.14 seconds to build instance. [ 671.310705] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962601, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.312538] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04587e7c-407b-48ca-aa2d-d95757feb90c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.321355] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ef7430-e8b5-425f-8697-e6793ea3d9ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.357229] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748106af-f788-45a2-a1be-0e9b443b164c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.365765] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461e4243-6fac-40cc-8566-ae16ef8d941f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.379980] env[68279]: DEBUG nova.compute.provider_tree [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.409479] env[68279]: DEBUG nova.network.neutron [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.436414] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Successfully updated port: 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.497195] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962602, 'name': Destroy_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.618796] env[68279]: DEBUG oslo_vmware.api [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962600, 'name': PowerOnVM_Task, 'duration_secs': 1.004152} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.619114] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.619307] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-46f43596-de9a-4709-a4d6-6edba3037f9a tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance '6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 671.632444] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962598, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.537237} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.632714] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 92786813-f4ab-4ff7-8597-aa1aa90eeb01/92786813-f4ab-4ff7-8597-aa1aa90eeb01.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.632931] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.633241] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e04a5d69-5b15-4237-b86f-99fa1a440b4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.640992] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 671.640992] env[68279]: value = "task-2962603" [ 671.640992] env[68279]: _type = "Task" [ 671.640992] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.652165] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962603, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.695702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f4eb44b7-e1d4-4ec0-a6bf-37a4ab3cc822 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.598s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.794051] env[68279]: DEBUG nova.network.neutron [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Updated VIF entry in instance network info cache for port 5486afba-2c1e-409f-8d28-00f3c857d1a3. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.794609] env[68279]: DEBUG nova.network.neutron [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Updating instance_info_cache with network_info: [{"id": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "address": "fa:16:3e:2f:fb:97", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5486afba-2c", "ovs_interfaceid": "5486afba-2c1e-409f-8d28-00f3c857d1a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.811986] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962601, 'name': CreateVM_Task, 'duration_secs': 0.920475} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.812178] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 671.812851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.813031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.813354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 671.813656] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0744a44b-ec38-4686-a946-c2c8f66dbbe3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.818826] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 671.818826] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52324de9-7705-9c91-20ef-8142ad23e9d5" [ 671.818826] env[68279]: _type = "Task" [ 671.818826] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.827587] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52324de9-7705-9c91-20ef-8142ad23e9d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.886249] env[68279]: DEBUG nova.scheduler.client.report [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 671.912178] env[68279]: INFO nova.compute.manager [-] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Took 1.94 seconds to deallocate network for instance. [ 671.939276] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.939823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.939823] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.999165] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962602, 'name': Destroy_Task, 'duration_secs': 0.718806} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.999459] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Destroyed the VM [ 671.999803] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 672.000070] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-502487a0-6310-4789-a0b0-7a548ff2710d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.008561] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 672.008561] env[68279]: value = "task-2962604" [ 672.008561] env[68279]: _type = "Task" [ 672.008561] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.018023] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962604, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.153503] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962603, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077169} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.153692] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.154422] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a784e1e-4d9f-44ee-8cdc-3407c873ccfe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.180321] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 92786813-f4ab-4ff7-8597-aa1aa90eeb01/92786813-f4ab-4ff7-8597-aa1aa90eeb01.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.180413] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14017bb0-4973-4240-acb8-11d9689aeb3b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.197673] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.202869] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 672.202869] env[68279]: value = "task-2962605" [ 672.202869] env[68279]: _type = "Task" [ 672.202869] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.213389] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.298352] env[68279]: DEBUG oslo_concurrency.lockutils [req-8a2ba3e1-4569-47c9-8e39-040f2596cacd req-a5c07f17-a53f-4342-839c-c89fdcfa2642 service nova] Releasing lock "refresh_cache-11c439ab-e27c-43e6-b752-c90af5f84bc1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.331679] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52324de9-7705-9c91-20ef-8142ad23e9d5, 'name': SearchDatastore_Task, 'duration_secs': 0.06192} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.331881] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.332141] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.332493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.333549] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.333549] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.333549] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccb5b317-002b-4d5d-94b7-e7ef922741f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.344751] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.344996] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 672.349025] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbfe02f9-d7be-4636-b388-b0317794c959 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.354027] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 672.354027] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ea53de-5a18-ab3b-be39-6e311aab24f7" [ 672.354027] env[68279]: _type = "Task" [ 672.354027] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.367486] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ea53de-5a18-ab3b-be39-6e311aab24f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011192} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.368438] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2789f1f-0477-4be6-b8da-288e2fa0ed3b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.375528] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 672.375528] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52db1a09-f7f8-e403-72b2-d9113d756b53" [ 672.375528] env[68279]: _type = "Task" [ 672.375528] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.386214] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52db1a09-f7f8-e403-72b2-d9113d756b53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.391254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.391797] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.395399] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.837s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.395399] env[68279]: DEBUG nova.objects.instance [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lazy-loading 'resources' on Instance uuid b869231a-5293-433f-ac7c-d50030368826 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.419982] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.476264] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "interface-deea2dea-1860-45a0-9637-ced09bb51b81-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.477935] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "interface-deea2dea-1860-45a0-9637-ced09bb51b81-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.477935] env[68279]: DEBUG nova.objects.instance [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lazy-loading 'flavor' on Instance uuid deea2dea-1860-45a0-9637-ced09bb51b81 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.514307] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.528085] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962604, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.720213] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962605, 'name': ReconfigVM_Task, 'duration_secs': 0.467494} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.720522] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 92786813-f4ab-4ff7-8597-aa1aa90eeb01/92786813-f4ab-4ff7-8597-aa1aa90eeb01.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.721245] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10c026cc-c80d-46cf-865c-e33afc6f9ed1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.730988] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 672.730988] env[68279]: value = "task-2962606" [ 672.730988] env[68279]: _type = "Task" [ 672.730988] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.733025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.743370] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962606, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.798015] env[68279]: DEBUG nova.network.neutron [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.887187] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52db1a09-f7f8-e403-72b2-d9113d756b53, 'name': SearchDatastore_Task, 'duration_secs': 0.011506} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.887580] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.887936] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 11c439ab-e27c-43e6-b752-c90af5f84bc1/11c439ab-e27c-43e6-b752-c90af5f84bc1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 672.888261] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17f78eac-46cb-454b-9517-c312574c0ad5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.897444] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 672.897444] env[68279]: value = "task-2962607" [ 672.897444] env[68279]: _type = "Task" [ 672.897444] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.902922] env[68279]: DEBUG nova.compute.utils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 672.908818] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 672.908887] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 672.923445] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962607, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.976810] env[68279]: DEBUG nova.policy [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8831a56664404da3a03d6d8241e693ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91ef3e24b66c44a29463a982c192a06e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 672.981496] env[68279]: DEBUG nova.objects.instance [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lazy-loading 'pci_requests' on Instance uuid deea2dea-1860-45a0-9637-ced09bb51b81 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 673.022263] env[68279]: DEBUG oslo_vmware.api [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962604, 'name': RemoveSnapshot_Task, 'duration_secs': 0.989143} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.022562] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 673.022796] env[68279]: INFO nova.compute.manager [None req-08c3e22e-42d3-4d1b-adb1-912986300fe4 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 16.03 seconds to snapshot the instance on the hypervisor. [ 673.250827] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962606, 'name': Rename_Task, 'duration_secs': 0.257306} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.250827] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.250827] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d70380d2-e027-4143-a6ba-2df1e29af037 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.263541] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 673.263541] env[68279]: value = "task-2962608" [ 673.263541] env[68279]: _type = "Task" [ 673.263541] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.276692] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.301024] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.301422] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance network_info: |[{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 673.301904] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c 
tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:5f:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15317896-8bd1-46c4-8fc9-8bf0966392a4', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.313520] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating folder: Project (34fd2747aeac4bcd9dd18075cf4ebd8b). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.319854] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cd719b9-048c-4660-9fda-8e52047a4f53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.339942] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created folder: Project (34fd2747aeac4bcd9dd18075cf4ebd8b) in parent group-v594445. [ 673.339942] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating folder: Instances. Parent ref: group-v594511. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.339942] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abccf7b9-06cb-4abe-8fdc-b57a6759fc93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.361402] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created folder: Instances in parent group-v594511. [ 673.361605] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.361743] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 673.361979] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d435f75c-06d4-4fff-a943-f4710225599b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.390629] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.390629] env[68279]: value = "task-2962611" [ 673.390629] env[68279]: _type = "Task" [ 673.390629] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.405945] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962611, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.408358] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.417248] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962607, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.463257] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Successfully created port: 4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.486588] env[68279]: DEBUG nova.objects.base [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 673.486940] env[68279]: DEBUG nova.network.neutron [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.516686] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f24cdd-d70f-45a1-bda3-e3f66dde1e3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.527501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1807ce92-22e2-4937-a4ef-4dafcf06eb07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.568568] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ec2d68-be5c-475e-b174-6cb74c04f852 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.579581] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfbaff3-b67c-4bdf-a3dc-4d9e6baba8a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.598041] env[68279]: DEBUG nova.compute.provider_tree [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Inventory has not changed in ProviderTree for 
provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.677389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f41f35fb-3c1a-4ed5-b24a-43348e40c835 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "interface-deea2dea-1860-45a0-9637-ced09bb51b81-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.201s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.775845] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.901402] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962611, 'name': CreateVM_Task, 'duration_secs': 0.495365} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.904501] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.905374] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.905592] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.905944] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.906647] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-291c8fea-d85f-4e20-9188-2d9c1d306171 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.912996] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639317} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.914303] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 11c439ab-e27c-43e6-b752-c90af5f84bc1/11c439ab-e27c-43e6-b752-c90af5f84bc1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.914582] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.914824] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 673.914824] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fef3f4-27d4-c8ed-f888-a5b6847c3266" [ 673.914824] env[68279]: _type = "Task" [ 673.914824] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.915036] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57c77e35-f1b1-43c8-8c9a-de04b312804e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.929944] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 673.929944] env[68279]: value = "task-2962612" [ 673.929944] env[68279]: _type = "Task" [ 673.929944] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.939725] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fef3f4-27d4-c8ed-f888-a5b6847c3266, 'name': SearchDatastore_Task, 'duration_secs': 0.011809} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.939725] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.939872] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.940122] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.940308] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.940602] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.941582] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34b0f028-7930-4c74-b1c4-aa420e6ff228 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.948830] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962612, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.958961] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.958961] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.959600] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efdb0c62-5bf7-438e-8e0a-88ac74430036 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.966551] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 673.966551] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b481f4-ff61-bb47-a224-5d06aff033c8" [ 673.966551] env[68279]: _type = "Task" [ 673.966551] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.977421] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b481f4-ff61-bb47-a224-5d06aff033c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.103041] env[68279]: DEBUG nova.scheduler.client.report [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.126965] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Received event network-vif-deleted-4fbdad72-dc39-4f81-80af-7eb2c9db8d4e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.127210] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.127446] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing instance network info cache due to event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 674.127672] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Acquiring lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.127811] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Acquired lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.127968] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.280785] env[68279]: DEBUG oslo_vmware.api [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962608, 'name': PowerOnVM_Task, 'duration_secs': 0.829418} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.281083] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.281398] env[68279]: INFO nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Took 10.29 seconds to spawn the instance on the hypervisor. [ 674.281657] env[68279]: DEBUG nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.282745] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889c1a2f-c096-449f-810a-58e4f42b5211 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.432080] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 674.444154] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085767} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.444450] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.445505] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06a4b78-a2dd-4797-aeaa-623fec017bfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.470600] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 11c439ab-e27c-43e6-b752-c90af5f84bc1/11c439ab-e27c-43e6-b752-c90af5f84bc1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.472873] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0631e1cb-da52-432b-8523-4fc51ee4f1ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.493860] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 674.494125] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.494323] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 674.494544] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor pref 0:0:0 {{(pid=68279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.494880] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 674.494880] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 674.495076] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 674.495271] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 674.495452] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 674.495613] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 674.495787] env[68279]: DEBUG nova.virt.hardware [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 674.496609] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01a956b-918f-42e7-a218-339f69c3c467 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.503454] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 674.503454] env[68279]: value = "task-2962613" [ 674.503454] env[68279]: _type = "Task" [ 674.503454] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.503713] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b481f4-ff61-bb47-a224-5d06aff033c8, 'name': SearchDatastore_Task, 'duration_secs': 0.01249} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.508194] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d540755-34c6-4c8e-80a6-5c232b55da7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.513752] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec100d9-9298-4a7d-a3de-c0c10c447efd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.526066] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.526307] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 674.526307] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2d31-acdf-4831-7f29-27672233645b" [ 674.526307] env[68279]: _type = "Task" [ 674.526307] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.545911] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2d31-acdf-4831-7f29-27672233645b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.609347] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.214s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.612255] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.038s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.612373] env[68279]: DEBUG nova.objects.instance [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lazy-loading 'resources' on Instance uuid 4c99c929-9fda-42f0-9327-0508ad3e6150 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 674.635772] env[68279]: INFO nova.scheduler.client.report [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Deleted allocations for instance b869231a-5293-433f-ac7c-d50030368826 [ 674.803977] env[68279]: INFO nova.compute.manager [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Took 31.79 seconds to build instance. [ 674.918919] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updated VIF entry in instance network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.918919] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [{"id": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "address": "fa:16:3e:f1:ad:db", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcc683d-e5", "ovs_interfaceid": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.017089] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.039031] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a2d31-acdf-4831-7f29-27672233645b, 'name': SearchDatastore_Task, 'duration_secs': 0.04186} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.039335] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.041775] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.041775] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c426da33-5887-4bbd-befb-4bf104e34969 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.050285] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 675.050285] env[68279]: value = "task-2962614" [ 675.050285] env[68279]: _type = "Task" [ 675.050285] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.063293] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962614, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.144972] env[68279]: DEBUG oslo_concurrency.lockutils [None req-181b1b73-3d1d-4e1b-8411-50b510a263bd tempest-InstanceActionsNegativeTestJSON-28975537 tempest-InstanceActionsNegativeTestJSON-28975537-project-member] Lock "b869231a-5293-433f-ac7c-d50030368826" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.242s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.307429] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52ce9f04-1c21-48e1-abb7-46e1676d9bfb tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.869s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.353646] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Successfully updated port: 4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 675.359022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.359022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.359022] env[68279]: DEBUG nova.compute.manager [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Going to confirm migration 1 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 675.421125] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Releasing lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.421852] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.421957] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Acquiring lock 
"eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.422428] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.422843] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.423312] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] No waiting events found dispatching network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 675.423689] env[68279]: WARNING nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received unexpected event network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 for instance with vm_state building and task_state spawning. [ 675.423980] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.426815] env[68279]: DEBUG nova.compute.manager [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing instance network info cache due to event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 675.427621] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.427955] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.428311] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.520908] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962613, 'name': ReconfigVM_Task, 'duration_secs': 0.716339} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.520908] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 11c439ab-e27c-43e6-b752-c90af5f84bc1/11c439ab-e27c-43e6-b752-c90af5f84bc1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.524687] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-742f6f12-1ea6-4418-afd8-5411f63ff624 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.532452] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 675.532452] env[68279]: value = "task-2962615" [ 675.532452] env[68279]: _type = "Task" [ 675.532452] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.550550] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962615, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.566177] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962614, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.689552] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e4b95d-4088-401a-a0c0-0f409a84aa84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.698093] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d6e883-4f29-4121-8deb-f29c5806f20d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.730738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f87fef0-74df-4d57-bee0-9b3eb4fa7b95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.742394] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f496108f-8700-4a38-9d5e-a8bb0e41b394 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.757813] env[68279]: DEBUG nova.compute.provider_tree [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.810890] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.823639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.823639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.823718] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 675.939271] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.939458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.939636] env[68279]: DEBUG nova.network.neutron [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 675.939816] env[68279]: DEBUG nova.objects.instance [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lazy-loading 'info_cache' on Instance uuid 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 676.033662] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.033884] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.046488] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962615, 'name': Rename_Task, 'duration_secs': 0.285889} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.046784] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.047041] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-230c1a76-30a1-4c3b-8b4e-a84da006a3e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.055301] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 676.055301] env[68279]: value = "task-2962616" [ 676.055301] env[68279]: _type = "Task" [ 676.055301] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.061446] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962614, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.066883] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962616, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.237095] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "deea2dea-1860-45a0-9637-ced09bb51b81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.237453] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.237699] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.237938] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.238563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.242238] env[68279]: INFO nova.compute.manager [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Terminating instance [ 676.264061] env[68279]: DEBUG nova.scheduler.client.report [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.336707] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.396932] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.562862] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962614, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.568702] env[68279]: DEBUG oslo_vmware.api [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962616, 'name': PowerOnVM_Task, 'duration_secs': 0.481201} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.569078] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.569422] env[68279]: INFO nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Took 9.85 seconds to spawn the instance on the hypervisor. [ 676.569671] env[68279]: DEBUG nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.570593] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ddc0b3-52ce-4fd4-8c10-30e132413f0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.589720] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updated VIF entry in instance network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 676.590307] env[68279]: DEBUG nova.network.neutron [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.739701] env[68279]: DEBUG nova.network.neutron [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Updating instance_info_cache with network_info: [{"id": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "address": "fa:16:3e:db:41:28", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da65432-06", "ovs_interfaceid": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.748380] env[68279]: DEBUG nova.compute.manager [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 676.748637] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.749883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fa5c4b-41ed-4459-864c-f866822791f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.760310] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 676.760635] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cb0e2d8-a9a8-4a48-a17f-8dbf65016487 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.768708] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 676.768708] env[68279]: value = "task-2962617" [ 676.768708] env[68279]: _type = "Task" [ 676.768708] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.769535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.780328] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.478s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.780561] env[68279]: DEBUG nova.objects.instance [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lazy-loading 'resources' on Instance uuid 239d0522-5101-49e0-8d3b-85b54927cd21 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 676.790515] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.797164] env[68279]: DEBUG nova.compute.manager [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Received event network-vif-plugged-4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.797164] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Acquiring lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.797164] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.797164] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.797164] env[68279]: DEBUG nova.compute.manager [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] No waiting events found dispatching network-vif-plugged-4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 676.797164] env[68279]: WARNING nova.compute.manager [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Received unexpected event network-vif-plugged-4da65432-061e-4e08-a5b9-cb90b33ffc25 for instance with vm_state building and task_state spawning. [ 676.797164] env[68279]: DEBUG nova.compute.manager [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Received event network-changed-4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.797164] env[68279]: DEBUG nova.compute.manager [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Refreshing instance network info cache due to event network-changed-4da65432-061e-4e08-a5b9-cb90b33ffc25. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 676.797164] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Acquiring lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.806875] env[68279]: INFO nova.scheduler.client.report [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Deleted allocations for instance 4c99c929-9fda-42f0-9327-0508ad3e6150 [ 677.070733] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962614, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.602029} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.071010] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.071241] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.071492] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf5036a5-8232-4c6d-b17b-27c978aa362e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.079768] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 677.079768] env[68279]: value = "task-2962618" [ 677.079768] env[68279]: _type = "Task" [ 677.079768] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.099033] env[68279]: DEBUG oslo_concurrency.lockutils [req-f95072d7-09fd-443e-9784-d20f5c537786 req-32849777-6a3b-4eb8-96d1-9b2f8fd704af service nova] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.099033] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962618, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.102674] env[68279]: INFO nova.compute.manager [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Took 32.97 seconds to build instance. [ 677.247029] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.247029] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Instance network_info: |[{"id": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "address": "fa:16:3e:db:41:28", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da65432-06", "ovs_interfaceid": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 677.247029] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Acquired lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.247029] env[68279]: DEBUG nova.network.neutron [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Refreshing network info cache for port 4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.251071] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:41:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '4da65432-061e-4e08-a5b9-cb90b33ffc25', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.263721] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Creating folder: Project (91ef3e24b66c44a29463a982c192a06e). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.265146] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a0c9d2a-f72e-47f9-91b1-44d17d7fb489 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.279375] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962617, 'name': PowerOffVM_Task, 'duration_secs': 0.283162} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.281887] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 677.281887] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 677.281887] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Created folder: Project (91ef3e24b66c44a29463a982c192a06e) in parent group-v594445. [ 677.281887] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Creating folder: Instances. Parent ref: group-v594514. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.281887] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10dd08d5-1cce-4bb7-b857-d1889d9c3e93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.283940] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d72b256f-5c4d-4fcb-9be4-d7cfc8673314 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.298402] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Created folder: Instances in parent group-v594514. 
[ 677.298687] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.298948] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 677.300018] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf2770c1-98ea-4e96-8985-60e2503aa434 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.322470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796388df-6d05-46ac-bcf7-be7037ac7047 tempest-ServerExternalEventsTest-1558896377 tempest-ServerExternalEventsTest-1558896377-project-member] Lock "4c99c929-9fda-42f0-9327-0508ad3e6150" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.464s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.327627] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.327627] env[68279]: value = "task-2962622" [ 677.327627] env[68279]: _type = "Task" [ 677.327627] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.333960] env[68279]: DEBUG nova.network.neutron [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [{"id": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "address": "fa:16:3e:00:83:88", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b424ba-67", "ovs_interfaceid": "16b424ba-6749-431c-bdc5-22c910ad0fe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.344685] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962622, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.364256] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 677.364256] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 677.364256] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Deleting the datastore file [datastore1] deea2dea-1860-45a0-9637-ced09bb51b81 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 677.364256] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f44e615-bbcb-42c2-aadf-b075c542f4f0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.375268] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for the task: (returnval){ [ 677.375268] env[68279]: value = "task-2962623" [ 677.375268] env[68279]: _type = "Task" [ 677.375268] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.387759] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962623, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.435820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.435820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.435938] env[68279]: DEBUG nova.compute.manager [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.436849] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca5f9dc-f159-40a0-9a37-def835b399d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.449386] env[68279]: DEBUG nova.compute.manager [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 677.450067] env[68279]: DEBUG nova.objects.instance [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lazy-loading 'flavor' on Instance uuid 92786813-f4ab-4ff7-8597-aa1aa90eeb01 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 677.605140] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120025} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.608576] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.609200] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2eaab320-2c94-4a16-9134-2bf26ae3d826 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.170s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.611413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074d7356-fee1-4a41-ad35-4512ae986996 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.643455] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.649177] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfe736bf-d517-4a05-a080-47ff39f6e9b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.674941] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 677.674941] env[68279]: value = "task-2962624" [ 677.674941] env[68279]: _type = "Task" [ 677.674941] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.688374] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962624, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.839638] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.839880] env[68279]: DEBUG nova.objects.instance [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lazy-loading 'migration_context' on Instance uuid 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 677.852355] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962622, 'name': CreateVM_Task, 'duration_secs': 0.449402} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.855928] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 677.858117] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.858300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.858619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 677.860362] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b0b11fd-4c25-4145-9f20-101a19c7df26 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.869703] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 677.869703] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b8889f-5e9a-dcbb-4f2b-2464d2bd5d8d" [ 677.869703] env[68279]: _type = "Task" [ 677.869703] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.881945] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b8889f-5e9a-dcbb-4f2b-2464d2bd5d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.890871] env[68279]: DEBUG oslo_vmware.api [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Task: {'id': task-2962623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214615} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.891712] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 677.892016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 677.892204] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.892376] env[68279]: INFO nova.compute.manager [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Took 1.14 seconds to destroy the instance on the hypervisor. [ 677.892666] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.892894] env[68279]: DEBUG nova.compute.manager [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 677.892986] env[68279]: DEBUG nova.network.neutron [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 677.912724] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789c11ae-3a6f-4db8-93c2-bdbf051488bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.921185] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1600d31-0e5d-43ca-ac84-b5b403322813 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.962065] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71cc355-721a-422b-8247-26f87ca440e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.971913] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464441a9-d150-4221-9f27-10e64c79e143 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.988756] env[68279]: DEBUG nova.compute.provider_tree [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.120219] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 678.191219] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.254746] env[68279]: DEBUG nova.network.neutron [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Updated VIF entry in instance network info cache for port 4da65432-061e-4e08-a5b9-cb90b33ffc25. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 678.258285] env[68279]: DEBUG nova.network.neutron [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Updating instance_info_cache with network_info: [{"id": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "address": "fa:16:3e:db:41:28", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da65432-06", "ovs_interfaceid": "4da65432-061e-4e08-a5b9-cb90b33ffc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.354257] env[68279]: DEBUG nova.objects.base [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Object Instance<6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 678.355627] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815f04d5-9d75-4c27-8790-a11c0a0bf8f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.386753] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9f22a84-800c-40f8-8a98-40e82d2bbd34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.397649] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b8889f-5e9a-dcbb-4f2b-2464d2bd5d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.012275} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.398028] env[68279]: DEBUG oslo_vmware.api [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 678.398028] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5234756e-1271-1d3a-3ff8-c1f4fbdd0051" [ 678.398028] env[68279]: _type = "Task" [ 678.398028] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.398222] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.398467] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 678.399020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.399020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.399020] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 678.399314] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1afb945a-9b64-4e8f-be0f-9cac2f0cd739 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.414302] env[68279]: DEBUG oslo_vmware.api [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5234756e-1271-1d3a-3ff8-c1f4fbdd0051, 'name': SearchDatastore_Task, 'duration_secs': 0.012349} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.415660] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.416060] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 678.416493] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 678.417358] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2166be34-5b82-406d-8528-e419d9408fcf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.424146] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 678.424146] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528463be-0928-6440-0d82-b9752113edfb" [ 678.424146] env[68279]: _type = "Task" [ 678.424146] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.434244] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528463be-0928-6440-0d82-b9752113edfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.464212] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.465511] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51a57fb2-0014-4ae4-bfe6-08f886bf95cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.476224] env[68279]: DEBUG oslo_vmware.api [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 678.476224] env[68279]: value = "task-2962625" [ 678.476224] env[68279]: _type = "Task" [ 678.476224] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.493509] env[68279]: DEBUG nova.scheduler.client.report [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.497682] env[68279]: DEBUG oslo_vmware.api [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962625, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.654144] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.687351] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962624, 'name': ReconfigVM_Task, 'duration_secs': 0.712607} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.687351] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfigured VM instance instance-00000017 to attach disk [datastore2] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.687913] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4004be9c-94bf-41ef-abcb-f8bd0eac5426 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.695261] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 678.695261] env[68279]: value = "task-2962626" [ 678.695261] env[68279]: _type = "Task" [ 678.695261] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.704955] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962626, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.763493] env[68279]: DEBUG oslo_concurrency.lockutils [req-81d16aed-eb3a-475d-b397-14bec880c566 req-6cf198fb-2572-441b-a9cc-07693264e8e6 service nova] Releasing lock "refresh_cache-bf4e6484-d17d-4244-9163-1ef0012874b8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.938204] env[68279]: DEBUG nova.network.neutron [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.939803] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528463be-0928-6440-0d82-b9752113edfb, 'name': SearchDatastore_Task, 'duration_secs': 0.020203} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.940964] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a752aa8-15e3-4230-a846-5a92fa483557 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.949095] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 678.949095] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c62a5-35c4-18a9-bd82-4150b62c4458" [ 678.949095] env[68279]: _type = "Task" [ 678.949095] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.959120] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c62a5-35c4-18a9-bd82-4150b62c4458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.986586] env[68279]: DEBUG oslo_vmware.api [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962625, 'name': PowerOffVM_Task, 'duration_secs': 0.204691} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.986586] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 678.986930] env[68279]: DEBUG nova.compute.manager [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.987490] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce59ee9-9598-48b8-83b6-fc97af24828f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.998827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.001949] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.902s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.004757] env[68279]: INFO nova.compute.claims [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.031183] env[68279]: INFO nova.scheduler.client.report [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Deleted allocations for instance 239d0522-5101-49e0-8d3b-85b54927cd21 [ 679.207406] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962626, 'name': Rename_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.259267] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.260246] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.260885] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.260885] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.261184] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.263837] env[68279]: INFO nova.compute.manager [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Terminating instance [ 679.445991] env[68279]: INFO nova.compute.manager [-] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Took 1.55 seconds to deallocate network for instance. [ 679.469818] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c62a5-35c4-18a9-bd82-4150b62c4458, 'name': SearchDatastore_Task, 'duration_secs': 0.012517} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.470183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.470486] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] bf4e6484-d17d-4244-9163-1ef0012874b8/bf4e6484-d17d-4244-9163-1ef0012874b8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 679.470811] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf29e462-c649-453d-8661-67127a47d2a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.481026] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 679.481026] env[68279]: value = "task-2962627" [ 679.481026] env[68279]: _type = "Task" [ 679.481026] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.491142] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962627, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.500028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6348f664-716b-4b81-b880-f04cac2f17f2 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.064s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.538062] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43fff3f7-46dc-49f7-8ed4-89477b3ea33b tempest-ServerDiagnosticsV248Test-215553345 tempest-ServerDiagnosticsV248Test-215553345-project-member] Lock "239d0522-5101-49e0-8d3b-85b54927cd21" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.760s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.601022] env[68279]: DEBUG nova.compute.manager [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.601712] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482ddf56-2ef6-4199-b4a4-a534ab1da39b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.622061] env[68279]: DEBUG nova.compute.manager [req-3dd34fe4-b5ec-4906-9b55-723472f4c051 req-796394d6-221b-446a-8360-cb6b319cc802 service nova] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Received event network-vif-deleted-ee8f22d1-04e2-4f48-a66c-35de46d6f8ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 679.708365] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962626, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.769313] env[68279]: DEBUG nova.compute.manager [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 679.769313] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.770499] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0436cb7d-9a84-4d3f-a162-a9456d60f72a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.780064] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 679.780419] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41600966-7b95-4ae5-b00b-e77f41308c0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.788860] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 679.788860] env[68279]: value = "task-2962628" [ 679.788860] env[68279]: _type = "Task" [ 679.788860] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.797945] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.960027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.994344] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962627, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.114018] env[68279]: INFO nova.compute.manager [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] instance snapshotting [ 680.117053] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3882a3f0-4624-4c02-8f2c-010c9552cde4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.148424] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44fbb36-be48-4642-929d-c75b2b4928d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.210829] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962626, 'name': Rename_Task, 'duration_secs': 1.214068} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.211132] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 680.211394] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-502b8db2-b9c0-46a6-ad27-58b0ac52d08e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.223544] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 680.223544] env[68279]: value = "task-2962629" [ 680.223544] env[68279]: _type = "Task" [ 680.223544] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.245523] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.304825] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962628, 'name': PowerOffVM_Task, 'duration_secs': 0.205236} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.304825] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 680.304825] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 680.304825] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fc8a395-2f42-4cab-98fc-225c78265b7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.376761] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 680.377096] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 680.377204] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleting the datastore file [datastore2] 11c439ab-e27c-43e6-b752-c90af5f84bc1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 680.377684] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2411c9a0-d607-44fc-9de4-a6a27401b508 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.390514] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for the task: (returnval){ [ 680.390514] env[68279]: value = "task-2962631" [ 680.390514] env[68279]: _type = "Task" [ 680.390514] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.401151] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.494038] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852663} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.494641] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] bf4e6484-d17d-4244-9163-1ef0012874b8/bf4e6484-d17d-4244-9163-1ef0012874b8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 680.494909] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 680.495448] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8513e046-5b82-4000-98a3-e89ef53c0648 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.503599] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 680.503599] env[68279]: value = "task-2962632" [ 680.503599] env[68279]: _type = "Task" [ 680.503599] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.518677] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962632, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.667266] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593a10ed-8773-41e3-a9e8-fc5d536e1d84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.671852] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 680.672697] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-38205d40-5ab6-4af6-a456-c45b64957b66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.677870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e181f90-5629-4bd0-a93c-2080b6d21d30 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.683937] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 680.683937] env[68279]: value = "task-2962633" [ 680.683937] env[68279]: _type = "Task" [ 680.683937] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.717599] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70cb388-4a38-473f-b567-b6264224a0bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.724332] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962633, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.464034] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5802e93b-1e9f-4117-ae2b-a91d516e6403 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.472933] env[68279]: DEBUG oslo_vmware.api [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2962629, 'name': PowerOnVM_Task, 'duration_secs': 0.65176} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.476278] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 681.476651] env[68279]: INFO nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Took 12.01 seconds to spawn the instance on the hypervisor. [ 681.476944] env[68279]: DEBUG nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 681.482090] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96850e56-7e5b-41da-b8f5-847738b9d2e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.489529] env[68279]: DEBUG oslo_vmware.api [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Task: {'id': task-2962631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228209} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.489767] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075049} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.500841] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 681.501606] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 681.501606] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 681.501606] env[68279]: INFO nova.compute.manager [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Took 1.73 seconds to destroy the instance on the hypervisor. [ 681.501606] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.501872] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 681.502317] env[68279]: DEBUG nova.compute.provider_tree [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.504034] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962633, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.506152] env[68279]: DEBUG nova.compute.manager [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 681.506317] env[68279]: DEBUG nova.network.neutron [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.508548] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8948bd29-41dd-4472-a81b-59442de3042d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.537102] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] bf4e6484-d17d-4244-9163-1ef0012874b8/bf4e6484-d17d-4244-9163-1ef0012874b8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 681.537751] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c7ce499-02ed-4251-8cff-f70086cdd09f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.559954] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 681.559954] env[68279]: value = "task-2962634" [ 681.559954] env[68279]: _type = "Task" [ 681.559954] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.569643] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962634, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.963867] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962633, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.974200] env[68279]: DEBUG nova.compute.manager [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 681.975071] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33862a5-21de-491c-b08f-d8662cab2c3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.006048] env[68279]: DEBUG nova.scheduler.client.report [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.027625] env[68279]: INFO nova.compute.manager [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Took 35.78 seconds to build instance. [ 682.077392] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962634, 'name': ReconfigVM_Task, 'duration_secs': 0.288786} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.079156] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Reconfigured VM instance instance-00000018 to attach disk [datastore1] bf4e6484-d17d-4244-9163-1ef0012874b8/bf4e6484-d17d-4244-9163-1ef0012874b8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 682.079156] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-235af043-280b-4a80-b06d-eb276055f125 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.086382] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 682.086382] env[68279]: value = "task-2962635" [ 682.086382] env[68279]: _type = "Task" [ 682.086382] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.097468] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962635, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.461815] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962633, 'name': CreateSnapshot_Task, 'duration_secs': 1.461905} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.462052] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 682.462974] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f066b06d-d434-43f9-b1b6-4dd122c60858 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.487328] env[68279]: INFO nova.compute.manager [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] instance snapshotting [ 682.487328] env[68279]: WARNING nova.compute.manager [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 682.494215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69eb1026-e5b9-4e45-8728-0ecf187cd93a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.517328] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.516s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.518891] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.521297] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.571s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.523365] env[68279]: INFO nova.compute.claims [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.527160] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3582d574-4a12-4c36-973a-65cbc17882c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.531032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4dc8bbf7-a4da-4c17-85c7-feb0f5c4108c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 42.324s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.601391] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962635, 'name': Rename_Task, 'duration_secs': 0.144029} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.601391] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 682.601391] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4cc243d9-5272-44ee-a974-68e618b170ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.609333] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 682.609333] env[68279]: value = "task-2962636" [ 682.609333] env[68279]: _type = "Task" [ 682.609333] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.621185] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962636, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.992946] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 682.993421] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7714817d-de02-4551-bbe2-6876c8465abf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.007189] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 683.007189] env[68279]: value = "task-2962637" [ 683.007189] env[68279]: _type = "Task" [ 683.007189] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.015668] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962637, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.025021] env[68279]: DEBUG nova.compute.utils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.025021] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 683.025471] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 683.034108] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 683.041813] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 683.042140] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-04f7d3bd-f843-464b-8c34-d6e20b839f13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.052445] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 683.052445] env[68279]: value = "task-2962638" [ 683.052445] env[68279]: _type = "Task" [ 683.052445] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.066020] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.073714] env[68279]: DEBUG nova.network.neutron [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.121873] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962636, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.134286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.134286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.147761] env[68279]: DEBUG nova.policy [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5cd7e44689a40d993e5da3165332fd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '295e4a23df6e4d029636d514484434e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.516118] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962637, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.531595] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.566236] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962638, 'name': CreateSnapshot_Task, 'duration_secs': 0.441184} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.566236] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 683.566236] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec41f8a3-0d3c-4cdc-9760-3aaf1b59d048 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.575025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.581701] env[68279]: INFO nova.compute.manager [-] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Took 2.08 seconds to deallocate network for instance. [ 683.629660] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962636, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.843954] env[68279]: DEBUG nova.compute.manager [req-43107735-039e-40b0-8025-cacdcbaa775b req-dc1e100b-8a8a-455a-9d7c-aaf6f026c9c2 service nova] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Received event network-vif-deleted-5486afba-2c1e-409f-8d28-00f3c857d1a3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.018848] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962637, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.096969] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 684.100711] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.100980] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ff38bba9-027b-456c-ba77-c6e465fea1e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.112566] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 684.112566] env[68279]: value = "task-2962639" [ 684.112566] env[68279]: _type = "Task" [ 684.112566] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.131351] env[68279]: DEBUG oslo_vmware.api [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962636, 'name': PowerOnVM_Task, 'duration_secs': 1.064256} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.134973] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.135171] env[68279]: INFO nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Took 9.70 seconds to spawn the instance on the hypervisor. [ 684.135446] env[68279]: DEBUG nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 684.136212] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962639, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.137062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fdf9a8-6a1f-4ed9-ad64-0130269896ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.160403] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a927ec2-ccc8-447f-9911-4644eb515095 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.171202] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9baca4-c0bd-47d4-8d73-ae246b7ccc22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.215868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f4c8b-e4b3-4823-8acf-0174fb0053bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.225184] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbe9a4d-94b3-4381-8bc2-73e4fadf283f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.247342] env[68279]: DEBUG nova.compute.provider_tree [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.310684] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Successfully created port: f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.519322] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962637, 'name': CloneVM_Task, 'duration_secs': 1.429868} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.520132] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Created linked-clone VM from snapshot [ 684.521795] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3043d6b-7908-474f-a1af-5e7cab87a156 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.532382] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Uploading image dc7517bd-abab-4454-8171-7659976dc6f1 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 684.544187] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.565600] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 684.565600] env[68279]: value = "vm-594518" [ 684.565600] env[68279]: _type = "VirtualMachine" [ 684.565600] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 684.565994] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-754c779c-825e-475f-ad68-5563180638e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.578325] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.578592] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.578748] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.578926] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.580372] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.580868] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.581170] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.581372] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 
tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.581668] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.581909] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.582165] env[68279]: DEBUG nova.virt.hardware [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.583526] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f1e5d1-7f72-4fd5-a6dd-54520e588bd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.588687] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lease: (returnval){ [ 684.588687] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dfecd2-eeed-75c1-72da-b345f41d85be" [ 684.588687] env[68279]: _type = "HttpNfcLease" [ 684.588687] env[68279]: } obtained for exporting VM: (result){ [ 684.588687] env[68279]: value = "vm-594518" [ 684.588687] env[68279]: _type = "VirtualMachine" [ 684.588687] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 684.588937] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the lease: (returnval){ [ 684.588937] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dfecd2-eeed-75c1-72da-b345f41d85be" [ 684.588937] env[68279]: _type = "HttpNfcLease" [ 684.588937] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 684.600810] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfcf297-ef82-4853-acbf-132482d2914b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.607142] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 684.607142] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dfecd2-eeed-75c1-72da-b345f41d85be" [ 684.607142] env[68279]: _type = "HttpNfcLease" [ 684.607142] env[68279]: } is ready. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 684.607626] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 684.607626] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dfecd2-eeed-75c1-72da-b345f41d85be" [ 684.607626] env[68279]: _type = "HttpNfcLease" [ 684.607626] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 684.609405] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58d3cd1-c73e-4b8e-9d9c-20fa0a727f62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.629971] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 684.630230] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 684.637880] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962639, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.707013] env[68279]: INFO nova.compute.manager [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Took 33.74 seconds to build instance. 
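[editor's note] The records above trace the image-snapshot/export flow the VMware driver follows: create a snapshot task, poll it to completion, request an HttpNfcLease via ExportVm, wait for the lease to become ready, then read the lease info and open the advertised disk-0.vmdk URL. The following is a minimal illustrative sketch of that pattern, not the Nova code that produced these records; it assumes an already established oslo.vmware VMwareAPISession (`session`) and a VM managed-object reference (`vm_ref`), and the snapshot name/arguments are placeholders.

    # Illustrative sketch of the snapshot-then-export flow seen in the log.
    # Assumes `session` is an oslo_vmware.api.VMwareAPISession and `vm_ref`
    # a VirtualMachine managed-object reference; argument values are examples.
    from oslo_vmware import vim_util


    def snapshot_and_export(session, vm_ref):
        # 1. CreateSnapshot_Task: invoke_api() issues the SOAP call and
        #    wait_for_task() polls it, producing the "progress is 0%/94%/100%"
        #    lines visible above.
        snap_task = session.invoke_api(
            session.vim, 'CreateSnapshot_Task', vm_ref,
            name='nova-snapshot', description='', memory=False, quiesce=False)
        session.wait_for_task(snap_task)

        # 2. ExportVm returns an HttpNfcLease; the driver waits for it to be
        #    ready before any disk data is read ("Lease ... is ready").
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)

        # 3. Read the lease info and pick the disk device URL, which is then
        #    opened over HTTPS for the streamOptimized image upload
        #    ("Found VMDK URL ... Opening URL ... for reading").
        lease_info = session.invoke_api(
            vim_util, 'get_object_property', session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.disk:          # only disk-backed URLs are usable
                return device_url.url    # e.g. https://esx.../disk-0.vmdk
        return None
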
[ 684.751050] env[68279]: DEBUG nova.scheduler.client.report [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.767809] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-799647cf-2f89-4d85-9f9f-2df23a8fde78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.136792] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962639, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.212447] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3d8635df-8d87-4479-91fd-97340e225dc1 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.558s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.256988] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.735s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.257081] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.260645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.268s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.260879] env[68279]: DEBUG nova.objects.instance [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lazy-loading 'resources' on Instance uuid b40956fc-66f5-4bb6-8763-22465bb221bf {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 685.486946] env[68279]: DEBUG nova.compute.manager [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.487044] env[68279]: DEBUG nova.compute.manager [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing instance network info cache due to event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 685.487283] env[68279]: DEBUG oslo_concurrency.lockutils [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.487714] env[68279]: DEBUG oslo_concurrency.lockutils [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.488026] env[68279]: DEBUG nova.network.neutron [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.636026] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962639, 'name': CloneVM_Task, 'duration_secs': 1.417576} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.636026] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Created linked-clone VM from snapshot [ 685.636451] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233b80ff-d103-4fd7-8895-c4e2c6546dd7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.646152] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Uploading image 3a51b02a-37d8-4844-b035-fb63bd3c4d9d {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 685.675124] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 685.675124] env[68279]: value = "vm-594520" [ 685.675124] env[68279]: _type = "VirtualMachine" [ 685.675124] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 685.675124] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-da2c8029-870e-4f10-b207-a6dab0a0eaac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.684385] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease: (returnval){ [ 685.684385] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5265ba67-2f53-d758-6453-6cf31a91422e" [ 685.684385] env[68279]: _type = "HttpNfcLease" [ 685.684385] env[68279]: } obtained for exporting VM: (result){ [ 685.684385] env[68279]: value = "vm-594520" [ 685.684385] env[68279]: _type = "VirtualMachine" [ 685.684385] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 685.685070] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the lease: (returnval){ [ 685.685070] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5265ba67-2f53-d758-6453-6cf31a91422e" [ 685.685070] env[68279]: _type = "HttpNfcLease" [ 685.685070] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 685.698305] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 685.698305] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5265ba67-2f53-d758-6453-6cf31a91422e" [ 685.698305] env[68279]: _type = "HttpNfcLease" [ 685.698305] env[68279]: } is ready. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 685.698305] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 685.698305] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5265ba67-2f53-d758-6453-6cf31a91422e" [ 685.698305] env[68279]: _type = "HttpNfcLease" [ 685.698305] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 685.699918] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82acfcdd-226b-4bcb-89f4-a6eb27fe7089 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.710253] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 685.710976] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 685.777933] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 685.783822] env[68279]: DEBUG nova.compute.utils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 685.789778] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 685.789778] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.844403] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-38835e2a-f26e-40e6-965e-8f390771d96c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.898644] env[68279]: DEBUG nova.policy [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e0f7b3e2c7140a0ba437b49aa138343', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16ad9a0a64524c9d897c11a1ae95dfe4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.292277] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.333821] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.487303] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45a6b94-0e90-4ba8-b13b-88286c7c11b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.498460] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e97ebc-a006-471e-b673-2a85b1df042d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.542269] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400e886f-8065-4769-877b-1093b0a26a04 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.553919] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e005a22-6a88-4bb2-b654-afb76b3f1e57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.575564] env[68279]: DEBUG nova.compute.provider_tree [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.638669] env[68279]: DEBUG nova.network.neutron [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updated VIF entry in instance network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.641574] env[68279]: DEBUG nova.network.neutron [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.084292] env[68279]: DEBUG nova.scheduler.client.report [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 687.149825] env[68279]: DEBUG oslo_concurrency.lockutils [req-a223f91b-436d-4836-b885-449db1c993e7 req-705f394d-a4c6-4ed0-a833-3a4118fd5f01 service nova] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.320587] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.391225] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Successfully updated port: f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.395032] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Successfully created port: 87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.594417] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.331s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.596465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.177s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.598108] env[68279]: INFO nova.compute.claims [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.634700] env[68279]: INFO nova.scheduler.client.report [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleted allocations for instance b40956fc-66f5-4bb6-8763-22465bb221bf [ 687.841101] env[68279]: DEBUG nova.compute.manager [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Received event network-vif-plugged-f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.841444] env[68279]: DEBUG oslo_concurrency.lockutils [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] Acquiring lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.841759] env[68279]: DEBUG oslo_concurrency.lockutils [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.842037] env[68279]: DEBUG 
oslo_concurrency.lockutils [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.842354] env[68279]: DEBUG nova.compute.manager [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] No waiting events found dispatching network-vif-plugged-f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.842607] env[68279]: WARNING nova.compute.manager [req-2c33ed32-cdd2-46e4-b6fb-dcc20a71eccc req-3f3c1bfd-0966-4302-a8e4-51c76d467b1f service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Received unexpected event network-vif-plugged-f0460457-e89a-40df-b773-9139c4f14b41 for instance with vm_state building and task_state spawning. [ 687.897182] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.897182] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.897182] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.149507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-db4e5a6e-93fb-4a51-bb5b-335e05ac7eca tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "b40956fc-66f5-4bb6-8763-22465bb221bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.633s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.441216] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.588868] env[68279]: DEBUG nova.network.neutron [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Updating instance_info_cache with network_info: [{"id": "f0460457-e89a-40df-b773-9139c4f14b41", "address": "fa:16:3e:dd:38:e9", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0460457-e8", "ovs_interfaceid": "f0460457-e89a-40df-b773-9139c4f14b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.046222] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197e5621-7d07-4d0d-a426-51e75a571914 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.055247] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e712b3-a93a-4c07-ae14-90e10d1d8a21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.088696] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea34815-88cf-48fc-a049-946456249fbd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.091719] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.092048] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Instance network_info: |[{"id": "f0460457-e89a-40df-b773-9139c4f14b41", "address": "fa:16:3e:dd:38:e9", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0460457-e8", "ovs_interfaceid": "f0460457-e89a-40df-b773-9139c4f14b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.092464] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:38:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0460457-e89a-40df-b773-9139c4f14b41', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.100669] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.101320] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.101320] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-439cce38-d200-4abe-9d29-2b772d8b9ebb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.119891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d434dbe1-43d0-40bb-a594-6cd3a26e5369 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.125310] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.125310] env[68279]: value = "task-2962642" [ 689.125310] env[68279]: _type = "Task" [ 689.125310] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.137611] env[68279]: DEBUG nova.compute.provider_tree [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.145307] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962642, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.153025] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Successfully updated port: 87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.637553] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962642, 'name': CreateVM_Task, 'duration_secs': 0.487215} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.637748] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.638603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.638798] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.639189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 689.639531] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eee0d13c-2616-459d-9c77-20f3fe1366c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.642730] env[68279]: DEBUG nova.scheduler.client.report [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.650397] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 689.650397] env[68279]: value = 
"session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523797e6-0c0e-97eb-0625-0352fd30b47c" [ 689.650397] env[68279]: _type = "Task" [ 689.650397] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.655465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.655629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquired lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.655801] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.662902] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523797e6-0c0e-97eb-0625-0352fd30b47c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.149061] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.149375] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 690.151806] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.854s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.153308] env[68279]: INFO nova.compute.claims [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.171365] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523797e6-0c0e-97eb-0625-0352fd30b47c, 'name': SearchDatastore_Task, 'duration_secs': 0.015995} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.171906] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.171906] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.172172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.172315] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.172497] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.173926] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2f2e2a1-ccb2-4faf-b3ea-0b0319e2fb5e {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.185946] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.186154] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.187121] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-375ac058-215d-431b-b542-327099db84b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.194389] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 690.194389] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522cc780-9ec1-2eeb-947b-3ab72b76556a" [ 690.194389] env[68279]: _type = "Task" [ 690.194389] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.199426] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.208891] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522cc780-9ec1-2eeb-947b-3ab72b76556a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.352080] env[68279]: DEBUG nova.network.neutron [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Updating instance_info_cache with network_info: [{"id": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "address": "fa:16:3e:58:0a:fd", "network": {"id": "194d7f5a-e924-4637-92df-06d1cd50d6e8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1077011166-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16ad9a0a64524c9d897c11a1ae95dfe4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87a60716-63", "ovs_interfaceid": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.665910] env[68279]: DEBUG nova.compute.utils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 690.666108] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 690.666201] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.706890] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522cc780-9ec1-2eeb-947b-3ab72b76556a, 'name': SearchDatastore_Task, 'duration_secs': 0.017251} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.707761] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1adb383-234a-45b4-a1f5-fe51ca8d2ac9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.711435] env[68279]: DEBUG nova.policy [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a874555472d4eb79a3a3765b659f936', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc2a94d5ee444449c6c4d088263440a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 690.717405] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 690.717405] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5230c615-cf39-9af4-9fb5-45a242685ce7" [ 690.717405] env[68279]: _type = "Task" [ 690.717405] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.725929] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5230c615-cf39-9af4-9fb5-45a242685ce7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.855168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Releasing lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.855457] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Instance network_info: |[{"id": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "address": "fa:16:3e:58:0a:fd", "network": {"id": "194d7f5a-e924-4637-92df-06d1cd50d6e8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1077011166-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16ad9a0a64524c9d897c11a1ae95dfe4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87a60716-63", "ovs_interfaceid": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 691.001859] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Successfully created port: a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.170021] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 691.235141] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5230c615-cf39-9af4-9fb5-45a242685ce7, 'name': SearchDatastore_Task, 'duration_secs': 0.016507} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.235456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.235769] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 95f0aeaa-75ab-4fd9-b28d-e43703429167/95f0aeaa-75ab-4fd9-b28d-e43703429167.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.236110] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c50890ae-95c5-483b-8538-36667d32f3bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.249946] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 691.249946] env[68279]: value = "task-2962643" [ 691.249946] env[68279]: _type = "Task" [ 691.249946] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.262517] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.635179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b37e0e4-0122-45f4-8a9e-e613d172b95e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.646079] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b47269-0106-4a16-a17a-38abc717805f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.684387] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036e628a-01c1-4026-9d57-3b1e36ee62ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.697250] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e7bd55-db56-49a3-b9b1-633c003eff6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.718763] env[68279]: DEBUG nova.compute.provider_tree [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.762215] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962643, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.185113] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 692.222519] env[68279]: DEBUG nova.scheduler.client.report [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 692.261908] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584426} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.262275] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 95f0aeaa-75ab-4fd9-b28d-e43703429167/95f0aeaa-75ab-4fd9-b28d-e43703429167.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.262537] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.262881] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d3768ec-15f0-47f2-9db9-54f42bcbeee5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.272968] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 692.272968] env[68279]: value = "task-2962644" [ 692.272968] env[68279]: _type = "Task" [ 692.272968] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.281955] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.494010] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Successfully updated port: a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.727714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.728342] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.731532] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.549s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.731761] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.731939] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 692.732270] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.343s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.733880] env[68279]: INFO nova.compute.claims [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.737562] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf677c0a-62eb-4af6-96ab-0c193251add6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.747287] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6876752-f801-49e9-b731-dc14af89264b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.763967] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc80d24-0e39-407f-9d35-06dfccfaa7f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.773056] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92293ab0-fc99-4f66-b703-fffa6449f631 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.786462] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074758} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.812973] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.813903] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179613MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 692.814055] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.814909] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3381d7-f1b3-460d-aad3-3018d03170a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.839317] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 95f0aeaa-75ab-4fd9-b28d-e43703429167/95f0aeaa-75ab-4fd9-b28d-e43703429167.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.839726] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4830c7c-5bee-4f08-bce3-134166e5a38e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.862006] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 692.862006] env[68279]: value = "task-2962645" [ 692.862006] env[68279]: _type = "Task" [ 692.862006] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.872890] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962645, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.940585] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 692.940841] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.940993] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 692.941198] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.941359] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 692.942182] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 692.942182] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 692.942182] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 
tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 692.942182] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 692.942182] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 692.942404] env[68279]: DEBUG nova.virt.hardware [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 692.944596] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701db4e0-0f5e-4f9f-856f-c8823d1b6cdb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Flavor pref 0:0:0 {{(pid=68279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 692.954021] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 692.954546] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 692.955019] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 692.955294] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 692.955567] env[68279]: DEBUG nova.virt.hardware [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 692.960166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3437055-afc7-4278-b122-d75e26b4bb78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.966640] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63821098-5d6d-49ed-aea3-8a4789e0e5c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.973427] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 692.975776] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efd55b0-8ea6-4265-aee5-3139e8a0db45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.982249] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee2ded8-dfa4-4197-9754-089d561a6bb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.996330] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 692.997016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:0a:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc6d5964-1106-4345-a26d-185dabd4ff0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87a60716-63ed-4918-b9f9-0ae21f3aa5d9', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.005508] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Creating folder: Project (16ad9a0a64524c9d897c11a1ae95dfe4). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.005835] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 693.005957] env[68279]: ERROR oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk due to incomplete transfer. 
[ 693.006585] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.006723] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.006877] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.008777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd2153f-59f7-445f-ae3d-df7814bec397 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.012386] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0cdfe4e-ae49-48e6-adca-a55219f1b2cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.014011] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8ee4786c-b0ec-4910-a507-a4754ec78f5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.030555] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 693.031783] env[68279]: ERROR oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk due to incomplete transfer. [ 693.031783] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-02a0903c-0da8-460c-b637-e45ad0ae34d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.033744] env[68279]: DEBUG oslo_vmware.rw_handles [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f81bb-a13d-1860-ca0f-c0f4e6ee9db1/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 693.033975] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Uploaded image dc7517bd-abab-4454-8171-7659976dc6f1 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 693.037339] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 693.038473] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-348321ed-fd16-4eb5-96fd-f91d757a6848 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.042225] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Created folder: Project (16ad9a0a64524c9d897c11a1ae95dfe4) in parent group-v594445. [ 693.042225] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Creating folder: Instances. Parent ref: group-v594522. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.042225] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2689d7bd-be49-4b8e-ab35-02d271cc3f9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.043570] env[68279]: DEBUG oslo_vmware.rw_handles [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b487d9-c4ad-e2ad-d9ce-ee39606fd1f8/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 693.043779] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Uploaded image 3a51b02a-37d8-4844-b035-fb63bd3c4d9d to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 693.045930] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 693.047826] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9a014f6e-439a-4b23-ba00-79975a39d5b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.049084] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 693.049084] env[68279]: value = "task-2962648" [ 693.049084] env[68279]: _type = "Task" [ 693.049084] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.055632] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Created folder: Instances in parent group-v594522. [ 693.055871] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.056130] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 693.056130] env[68279]: value = "task-2962649" [ 693.056130] env[68279]: _type = "Task" [ 693.056130] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.057627] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.057627] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb8bfe47-1f57-49bd-898d-ba37ae948c1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.079514] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962648, 'name': Destroy_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.085462] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962649, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.086747] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.086747] env[68279]: value = "task-2962650" [ 693.086747] env[68279]: _type = "Task" [ 693.086747] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.095727] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962650, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.238625] env[68279]: DEBUG nova.compute.utils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.240229] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 693.240310] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.319078] env[68279]: DEBUG nova.policy [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95a607e2813b4474a1fc52d4580ec15f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e105d6867a24b6fbc867a41adecf830', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.380711] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962645, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.559499] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962648, 'name': Destroy_Task, 'duration_secs': 0.442467} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.562850] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Destroyed the VM [ 693.567167] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 693.567465] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9efae9d9-e2e5-43e3-8e66-42b71738db75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.576884] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962649, 'name': Destroy_Task, 'duration_secs': 0.322523} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.579083] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Destroyed the VM [ 693.579390] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 693.579739] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 693.579739] env[68279]: value = "task-2962651" [ 693.579739] env[68279]: _type = "Task" [ 693.579739] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.580750] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e3634088-6854-4332-ae4e-8049df1ed43d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.595675] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 693.595675] env[68279]: value = "task-2962652" [ 693.595675] env[68279]: _type = "Task" [ 693.595675] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.596718] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962651, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.604045] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962650, 'name': CreateVM_Task, 'duration_secs': 0.463939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.604585] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.605525] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.605835] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.606150] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.607106] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.609821] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e90d03a0-2c17-473d-942b-3839169f1d6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.616163] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962652, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.623066] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 693.623066] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a785b9-0629-138d-10a3-84dde3557c3c" [ 693.623066] env[68279]: _type = "Task" [ 693.623066] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.630421] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a785b9-0629-138d-10a3-84dde3557c3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.745803] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.879145] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962645, 'name': ReconfigVM_Task, 'duration_secs': 0.822909} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.885806] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 95f0aeaa-75ab-4fd9-b28d-e43703429167/95f0aeaa-75ab-4fd9-b28d-e43703429167.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.887798] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06a7104e-2dd4-4258-83ea-341a7b0d57d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.893788] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.894041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.901770] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 693.901770] env[68279]: value = "task-2962653" [ 693.901770] env[68279]: _type = "Task" [ 693.901770] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.911185] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962653, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.969952] env[68279]: DEBUG nova.network.neutron [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.098745] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962651, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.112559] env[68279]: DEBUG oslo_vmware.api [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962652, 'name': RemoveSnapshot_Task, 'duration_secs': 0.345456} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.112994] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 694.113251] env[68279]: INFO nova.compute.manager [None req-559de488-632e-41e0-a651-ae708548c8d9 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Took 11.62 seconds to snapshot the instance on the hypervisor. 
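The Destroy_Task / RemoveSnapshot_Task / CreateVM_Task entries above all follow the same oslo.vmware pattern: a vSphere task is invoked through the API session, then polled with wait_for_task() until it completes, which is what produces the repeated "progress is N%" and "completed successfully ... duration_secs" records. A minimal sketch of that pattern is below, assuming a reachable vCenter; the hostname, credentials and managed-object ID are placeholders, not values taken from this log.

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials (illustrative only).
session = api.VMwareAPISession(
    'vc.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,   # interval at which the "progress is N%" polls happen
)

# Placeholder managed-object reference for the VM to destroy.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() issues the vSphere call and returns a task reference;
# wait_for_task() polls that task and returns on success or raises on error.
task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
session.wait_for_task(task)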
[ 694.133182] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a785b9-0629-138d-10a3-84dde3557c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.011174} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.135960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.136283] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.136522] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.136670] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.136844] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.138201] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Successfully created port: bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.142022] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f3548da-86e6-4092-81d2-a97cc6cd6119 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.164174] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.164174] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.167408] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a276517c-e9be-4583-a525-030075eebde9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.176210] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 694.176210] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c0b85-26d9-788a-aeeb-2ee69b82e22e" [ 694.176210] env[68279]: _type = "Task" [ 694.176210] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.186364] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c0b85-26d9-788a-aeeb-2ee69b82e22e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.251257] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Received event network-changed-f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.251526] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Refreshing instance network info cache due to event network-changed-f0460457-e89a-40df-b773-9139c4f14b41. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.253015] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Acquiring lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.253015] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Acquired lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.253015] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Refreshing network info cache for port f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.366346] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447e2ecc-3c91-4d8b-a3c5-89f49f0e7e60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.375366] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9e3eed-d0c4-47ca-8575-557bdde46a14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.411546] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5509c16e-9bf9-4da4-bc19-318c631c4ca2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.425035] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0554c0ac-6100-4705-a590-a5f93459645a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.428478] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962653, 'name': Rename_Task, 'duration_secs': 0.169159} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.428752] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 694.429441] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62ca0e34-abc7-4fd1-b45f-0e2d81ebcca0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.439657] env[68279]: DEBUG nova.compute.provider_tree [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.442381] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 694.442381] env[68279]: value = "task-2962654" [ 694.442381] env[68279]: _type = "Task" [ 694.442381] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.452187] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962654, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.472936] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.473293] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Instance network_info: |[{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.473754] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:78:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a981d4aa-4af9-4362-9690-4170835dd9b4', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.481609] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Creating folder: Project (abc2a94d5ee444449c6c4d088263440a). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.481874] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-072c251f-7bd9-4059-946d-1f1683077480 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.494093] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Created folder: Project (abc2a94d5ee444449c6c4d088263440a) in parent group-v594445. [ 694.494303] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Creating folder: Instances. Parent ref: group-v594525. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.494605] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3916d31d-3753-455e-90fc-113b031d8612 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.505136] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Created folder: Instances in parent group-v594525. [ 694.505379] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.505573] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.505781] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fa636f6-cb28-4d5b-938e-2baf6e8e64e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.530616] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.530616] env[68279]: value = "task-2962657" [ 694.530616] env[68279]: _type = "Task" [ 694.530616] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.539817] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962657, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.593699] env[68279]: DEBUG oslo_vmware.api [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962651, 'name': RemoveSnapshot_Task, 'duration_secs': 0.592765} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.594223] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 694.594613] env[68279]: INFO nova.compute.manager [None req-028b037d-f923-4761-89bc-1e799fbd0704 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 14.48 seconds to snapshot the instance on the hypervisor. [ 694.689929] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c0b85-26d9-788a-aeeb-2ee69b82e22e, 'name': SearchDatastore_Task, 'duration_secs': 0.010536} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.690776] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c0d89b4-9fc5-42fc-9fbf-0fd07669d4cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.697150] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 694.697150] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d463ef-1a81-afe5-4322-e12615844a87" [ 694.697150] env[68279]: _type = "Task" [ 694.697150] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.706374] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d463ef-1a81-afe5-4322-e12615844a87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.766107] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.796835] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.797796] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.798104] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.798448] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.798677] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.798888] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.799184] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.799435] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.799716] env[68279]: DEBUG nova.virt.hardware [None 
req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.799960] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.800216] env[68279]: DEBUG nova.virt.hardware [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.801869] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adae643c-b7cd-4104-a659-222a0a8ccfdb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.811479] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918ab84f-114a-4743-94e3-bd61aff9dc46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.943841] env[68279]: DEBUG nova.scheduler.client.report [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.961690] env[68279]: DEBUG oslo_vmware.api [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962654, 'name': PowerOnVM_Task, 'duration_secs': 0.463747} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.962118] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.962331] env[68279]: INFO nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Took 10.42 seconds to spawn the instance on the hypervisor. 
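The "Acquiring lock" / "Acquired external semaphore" / "Releasing lock" messages around the devstack-image-cache_base paths (including in the entries just below) come from oslo.concurrency's lockutils, which Nova uses so that only one worker populates a given cached image while others wait and then copy it. A minimal sketch of that locking pattern follows, assuming oslo.concurrency defaults; the lock name, lock_path and populate_cache() callable are placeholders rather than Nova's actual identifiers.

from oslo_concurrency import lockutils

def ensure_cached_image(image_id, populate_cache):
    # The log uses the full datastore path of the cached vmdk as the lock
    # name; a simplified placeholder name is used here. external=True makes
    # the lock file-backed, so separate worker processes serialize on it as
    # well ("Acquired external semaphore" in the log). lock_path stands in
    # for the [oslo_concurrency]/lock_path configuration option.
    with lockutils.lock('devstack-image-cache-%s' % image_id,
                        external=True, lock_path='/tmp'):
        populate_cache(image_id)   # fetch/copy the image only if it is missing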
[ 694.962605] env[68279]: DEBUG nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.963426] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d27a101-8e21-4e87-876e-d499f7b90704 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.041696] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962657, 'name': CreateVM_Task, 'duration_secs': 0.323581} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.041696] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 695.041980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.042160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.042472] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 695.042727] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-110d439b-05ec-4292-a043-82c635444e97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.047585] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 695.047585] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b41db0-8ab4-45fb-29d9-8f2e4e72146f" [ 695.047585] env[68279]: _type = "Task" [ 695.047585] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.055375] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b41db0-8ab4-45fb-29d9-8f2e4e72146f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.150730] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Updated VIF entry in instance network info cache for port f0460457-e89a-40df-b773-9139c4f14b41. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.151116] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Updating instance_info_cache with network_info: [{"id": "f0460457-e89a-40df-b773-9139c4f14b41", "address": "fa:16:3e:dd:38:e9", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0460457-e8", "ovs_interfaceid": "f0460457-e89a-40df-b773-9139c4f14b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.207736] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d463ef-1a81-afe5-4322-e12615844a87, 'name': SearchDatastore_Task, 'duration_secs': 0.0118} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.208430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.208711] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9/6e947ed2-a6aa-42d4-b97e-31db33f6d5f9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.208969] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb1718a1-2724-413e-9435-f50a221370fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.215892] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 695.215892] env[68279]: value = "task-2962658" [ 695.215892] env[68279]: _type = "Task" [ 695.215892] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.225114] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962658, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.456596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.456596] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 695.460254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.040s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.460539] env[68279]: DEBUG nova.objects.instance [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lazy-loading 'resources' on Instance uuid 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 695.480605] env[68279]: INFO nova.compute.manager [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Took 41.41 seconds to build instance. [ 695.565332] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b41db0-8ab4-45fb-29d9-8f2e4e72146f, 'name': SearchDatastore_Task, 'duration_secs': 0.009976} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.565631] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.565873] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.566133] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.566265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.566473] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 
tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.567849] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd14802e-86db-4e1b-b47d-c12eafab4739 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.585693] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.585872] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.587023] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2945287-3dab-494c-830e-0df7883f99dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.599473] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 695.599473] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a9cade-a51e-dafd-9ab5-2d1e55077c39" [ 695.599473] env[68279]: _type = "Task" [ 695.599473] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.612836] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a9cade-a51e-dafd-9ab5-2d1e55077c39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.655786] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Releasing lock "refresh_cache-95f0aeaa-75ab-4fd9-b28d-e43703429167" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.656088] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Received event network-vif-plugged-87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.656291] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Acquiring lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.656533] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.656703] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.656874] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] No waiting events found dispatching network-vif-plugged-87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.657075] env[68279]: WARNING nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Received unexpected event network-vif-plugged-87a60716-63ed-4918-b9f9-0ae21f3aa5d9 for instance with vm_state building and task_state spawning. [ 695.657211] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Received event network-changed-87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.657366] env[68279]: DEBUG nova.compute.manager [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Refreshing instance network info cache due to event network-changed-87a60716-63ed-4918-b9f9-0ae21f3aa5d9. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 695.657551] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Acquiring lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.657686] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Acquired lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.657853] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Refreshing network info cache for port 87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.727118] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493762} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.727402] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9/6e947ed2-a6aa-42d4-b97e-31db33f6d5f9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.727619] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.727894] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c974b07-0a48-4aa0-9c7f-e9a07a958bfe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.738519] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 695.738519] env[68279]: value = "task-2962659" [ 695.738519] env[68279]: _type = "Task" [ 695.738519] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.750669] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962659, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.964372] env[68279]: DEBUG nova.compute.utils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.966886] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.967309] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.983703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-692d8971-eea8-4abc-bf32-34750334a772 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.533s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.061219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.061219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.061219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.061219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.061219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 
tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.064547] env[68279]: INFO nova.compute.manager [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Terminating instance [ 696.087811] env[68279]: DEBUG nova.policy [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb958ba000164650b414aab26a54aba1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db718fbefd764ba09477778fefd4e34d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 696.112390] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a9cade-a51e-dafd-9ab5-2d1e55077c39, 'name': SearchDatastore_Task, 'duration_secs': 0.061235} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.116217] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bc9bd5d-3a3b-48e6-95d4-e8d6930fab36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.123711] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 696.123711] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5245e9d7-58f9-65c5-0d5c-abd2fbe84703" [ 696.123711] env[68279]: _type = "Task" [ 696.123711] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.134748] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5245e9d7-58f9-65c5-0d5c-abd2fbe84703, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.238524] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Successfully updated port: bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 696.262665] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072917} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.262715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.263719] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbdeb41-b9d0-427b-9dbe-cc19f0822e48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.296446] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9/6e947ed2-a6aa-42d4-b97e-31db33f6d5f9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.301083] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7ba1433-dae3-4639-922f-fb4a0fe35604 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.323197] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 696.323197] env[68279]: value = "task-2962660" [ 696.323197] env[68279]: _type = "Task" [ 696.323197] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.334571] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962660, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.451838] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Updated VIF entry in instance network info cache for port 87a60716-63ed-4918-b9f9-0ae21f3aa5d9. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.452294] env[68279]: DEBUG nova.network.neutron [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Updating instance_info_cache with network_info: [{"id": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "address": "fa:16:3e:58:0a:fd", "network": {"id": "194d7f5a-e924-4637-92df-06d1cd50d6e8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1077011166-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16ad9a0a64524c9d897c11a1ae95dfe4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87a60716-63", "ovs_interfaceid": "87a60716-63ed-4918-b9f9-0ae21f3aa5d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.471072] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 696.480083] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e45cd4-a42c-4340-8a3a-f0e066d67242 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.484399] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.490181] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab4ed4c-d7d3-4433-a4a4-b2aaa5bb7771 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.531021] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8ce24f-af7c-4025-8aff-cba05d137843 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.540608] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dd1307-3095-42d6-8891-43985ae7ee42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.556093] env[68279]: DEBUG nova.compute.provider_tree [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.568770] env[68279]: DEBUG nova.compute.manager [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.569855] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.570456] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d632fb-4d2c-4744-bec9-f5724777249a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.578639] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 696.578901] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9a71e59-640f-49ae-a160-e1f2cb4a0822 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.635294] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5245e9d7-58f9-65c5-0d5c-abd2fbe84703, 'name': SearchDatastore_Task, 'duration_secs': 0.011107} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.635448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.635599] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/61392426-52b8-437e-ab3d-122d9335cd36.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.635867] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13940eb0-6dbc-4618-a4be-76f7f12b8c18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.644264] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 696.644264] env[68279]: value = "task-2962662" [ 696.644264] env[68279]: _type = "Task" [ 696.644264] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.648997] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 696.649264] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 696.649521] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore2] 92786813-f4ab-4ff7-8597-aa1aa90eeb01 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.650167] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab5eeb50-8086-4126-bf74-02086de228d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.655298] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962662, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.660602] env[68279]: DEBUG oslo_vmware.api [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 696.660602] env[68279]: value = "task-2962663" [ 696.660602] env[68279]: _type = "Task" [ 696.660602] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.668948] env[68279]: DEBUG oslo_vmware.api [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.754641] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.754775] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquired lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.754983] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.806650] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "d452e3d2-1590-4352-8406-31d85b2921f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.806908] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.840632] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962660, 'name': ReconfigVM_Task, 'duration_secs': 0.282518} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.840968] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9/6e947ed2-a6aa-42d4-b97e-31db33f6d5f9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.841782] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42b91155-6d78-4da9-bec0-8c1ebdf7db2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.853364] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 696.853364] env[68279]: value = "task-2962664" [ 696.853364] env[68279]: _type = "Task" [ 696.853364] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.865089] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962664, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.956095] env[68279]: DEBUG oslo_concurrency.lockutils [req-16e34a11-8eb9-421d-bcbe-ac93338e9a1e req-199748bc-677c-4122-a483-d45b58099e36 service nova] Releasing lock "refresh_cache-6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.983334] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Successfully created port: c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.011338] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.060068] env[68279]: DEBUG nova.scheduler.client.report [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.158964] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962662, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.178234] env[68279]: DEBUG oslo_vmware.api [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161835} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.178550] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.178740] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.179697] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.179940] env[68279]: INFO nova.compute.manager [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Took 0.61 seconds to destroy the instance on the hypervisor. [ 697.180295] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.180531] env[68279]: DEBUG nova.compute.manager [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.180676] env[68279]: DEBUG nova.network.neutron [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.327506] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-vif-plugged-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.327791] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquiring lock "61392426-52b8-437e-ab3d-122d9335cd36-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.328023] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Lock "61392426-52b8-437e-ab3d-122d9335cd36-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.328217] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Lock "61392426-52b8-437e-ab3d-122d9335cd36-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.328676] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] No waiting events found dispatching network-vif-plugged-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.328676] env[68279]: WARNING nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received unexpected event network-vif-plugged-a981d4aa-4af9-4362-9690-4170835dd9b4 for instance with vm_state building and task_state spawning. [ 697.328877] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.329016] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing instance network info cache due to event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.329838] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.330174] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.330410] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.335202] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.365617] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962664, 'name': Rename_Task, 'duration_secs': 0.414559} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.368473] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 697.369171] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f4babe4-59f9-4237-9f8f-cbdf6bc2ec0e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.378533] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 697.378533] env[68279]: value = "task-2962665" [ 697.378533] env[68279]: _type = "Task" [ 697.378533] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.389083] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962665, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.484284] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 697.518047] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 697.518440] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.518779] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.519747] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.520068] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.520383] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 697.520860] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 697.522053] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 697.522053] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 697.522053] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 697.522053] env[68279]: DEBUG nova.virt.hardware [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 697.522947] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf17b992-4a63-4e08-92fe-9ad7628d2f84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.536021] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d643cd9-80f2-4da7-908b-1403c69cab16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.570261] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.571686] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.838s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.572616] env[68279]: INFO nova.compute.claims [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.594633] env[68279]: INFO nova.scheduler.client.report [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Deleted allocations for instance 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4 
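The inventory reported above for provider 40ba16cf-8244-4715-b8c1-975029462ee4 is what bounds the scheduler's claims seen in these entries: placement treats roughly (total - reserved) * allocation_ratio as consumable per resource class, with max_unit capping any single allocation. A minimal illustrative sketch of that arithmetic using the figures from the log (not code taken from Nova or placement themselves):

```python
# Illustrative only: effective capacity implied by the inventory logged above.
# Formula assumed: (total - reserved) * allocation_ratio, capped per allocation
# by max_unit.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 153},
}

for resource_class, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: {effective:.0f} consumable, "
          f"at most {inv['max_unit']} per allocation")

# VCPU: 192 consumable, at most 16 per allocation
# MEMORY_MB: 196078 consumable, at most 65530 per allocation
# DISK_GB: 400 consumable, at most 153 per allocation
```

Under those assumed figures, the m1.nano claims (1 VCPU, 192 MB, 1 GB root disk) in this run consume only a small fraction of the provider, which is consistent with the repeated "Inventory has not changed" and "Claim successful" entries around this point in the log.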
[ 697.661063] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534237} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.664022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/61392426-52b8-437e-ab3d-122d9335cd36.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.664022] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.664022] env[68279]: DEBUG nova.network.neutron [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updating instance_info_cache with network_info: [{"id": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "address": "fa:16:3e:18:e4:1b", "network": {"id": "52acc4ef-d1e2-42b3-ad40-a9225964adef", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-539857656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e105d6867a24b6fbc867a41adecf830", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea9973-01", "ovs_interfaceid": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.664844] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93f237bb-cc66-4fe0-bd5d-ecf323cd3e0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.675039] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 697.675039] env[68279]: value = "task-2962666" [ 697.675039] env[68279]: _type = "Task" [ 697.675039] env[68279]: } to 
complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.688457] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.746977] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.747160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.894769] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962665, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.898407] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.898784] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.898874] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.899202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.899270] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.901557] env[68279]: INFO nova.compute.manager [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Terminating instance [ 698.104978] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9a283464-b8f6-4408-bbbd-ff6d6cd03f9f tempest-ServerPasswordTestJSON-1245861442 tempest-ServerPasswordTestJSON-1245861442-project-member] Lock "9d4b56df-11d9-4d94-94f3-6c5e27ea85f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.777s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.124277] env[68279]: DEBUG nova.network.neutron [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.159197] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updated VIF entry in instance network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.159645] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.168934] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Releasing lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.169791] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Instance network_info: |[{"id": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "address": "fa:16:3e:18:e4:1b", "network": {"id": "52acc4ef-d1e2-42b3-ad40-a9225964adef", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-539857656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e105d6867a24b6fbc867a41adecf830", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea9973-01", "ovs_interfaceid": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 698.169791] env[68279]: DEBUG 
nova.virt.vmwareapi.vmops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:e4:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfea9973-01ad-4d9c-a033-e6abdbcd8c3c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.177261] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Creating folder: Project (9e105d6867a24b6fbc867a41adecf830). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.178280] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c263de10-5710-437b-ac3a-eff28490919f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.191379] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069217} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.191686] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 698.193488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8810b996-8fda-4919-9ade-56f0cc40c2ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.197333] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Created folder: Project (9e105d6867a24b6fbc867a41adecf830) in parent group-v594445. [ 698.197528] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Creating folder: Instances. Parent ref: group-v594528. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.197755] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55234e39-9b1f-410e-9ab3-1d5ff1c2784d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.219623] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/61392426-52b8-437e-ab3d-122d9335cd36.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.220500] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-183e5a4b-4aad-4c28-b28c-671179993935 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.236704] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Created folder: Instances in parent group-v594528. [ 698.237047] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.237638] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 698.238421] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fba414b-8105-4804-9be7-1ceb37a8090d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.255083] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 698.255083] env[68279]: value = "task-2962669" [ 698.255083] env[68279]: _type = "Task" [ 698.255083] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.261166] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.261166] env[68279]: value = "task-2962670" [ 698.261166] env[68279]: _type = "Task" [ 698.261166] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.270259] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962669, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.275894] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962670, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.389806] env[68279]: DEBUG oslo_vmware.api [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962665, 'name': PowerOnVM_Task, 'duration_secs': 0.534567} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.390243] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 698.390563] env[68279]: INFO nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Took 11.07 seconds to spawn the instance on the hypervisor. [ 698.390881] env[68279]: DEBUG nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 698.392235] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9b4fba-093c-4f56-8130-150a3d6c9fe6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.408111] env[68279]: DEBUG nova.compute.manager [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.408590] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.409595] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a211d553-dec8-4de4-b6df-37c1584cb7a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.415064] env[68279]: DEBUG nova.compute.manager [req-6ef65861-467e-4c73-acad-fc81e9dee9f8 req-32d30bf6-3df0-4850-b007-d7a71a42542f service nova] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Received event network-vif-deleted-103f3058-f969-4e2c-bb38-2c0fa06ba731 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.421339] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.421624] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c79ffc5-61d3-4586-b022-b881c40d72fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.440019] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 698.440019] env[68279]: value = "task-2962671" [ 698.440019] env[68279]: _type = "Task" [ 698.440019] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.450475] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.627658] env[68279]: INFO nova.compute.manager [-] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Took 1.45 seconds to deallocate network for instance. 
[ 698.663702] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.664099] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Received event network-vif-plugged-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.664333] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquiring lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.664550] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.664832] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.664903] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] No waiting events found dispatching network-vif-plugged-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 698.665053] env[68279]: WARNING nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Received unexpected event network-vif-plugged-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c for instance with vm_state building and task_state spawning. [ 698.665219] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Received event network-changed-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.665370] env[68279]: DEBUG nova.compute.manager [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Refreshing instance network info cache due to event network-changed-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 698.665553] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquiring lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.665686] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Acquired lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.665967] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Refreshing network info cache for port bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.769242] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962669, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.784397] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962670, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.913874] env[68279]: INFO nova.compute.manager [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Took 41.99 seconds to build instance. [ 698.954298] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962671, 'name': PowerOffVM_Task, 'duration_secs': 0.303936} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.957029] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.957201] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.957641] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95df0848-7ed8-4362-b3e1-c4e9cb5c1dfd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.020521] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Successfully updated port: c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.031928] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.032164] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.032379] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Deleting the datastore file [datastore1] 010e5bfc-814c-4bde-8a16-7c2009ee13b6 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.033035] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af3523e5-60cc-4052-b587-2d219625eef1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.041027] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for the task: (returnval){ [ 699.041027] env[68279]: value = "task-2962673" [ 699.041027] env[68279]: _type = "Task" [ 699.041027] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.052840] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.136507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.136733] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.137585] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.192725] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112eb46e-ccd0-4b10-93d2-a120dcd81d6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.201630] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9a5cbe-111a-44c1-bb57-c3b3096e8d11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.236093] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046eeba1-7626-49ff-a815-28f6565bca17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.244481] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5701bb55-f657-4089-b961-ebc5cda7442a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.260273] env[68279]: DEBUG nova.compute.provider_tree [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.273068] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962669, 'name': ReconfigVM_Task, 
'duration_secs': 0.56386} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.276334] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/61392426-52b8-437e-ab3d-122d9335cd36.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.277122] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962670, 'name': CreateVM_Task, 'duration_secs': 0.544333} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.277316] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-475e41ca-c9a8-443f-bbc8-502207591808 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.278839] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.279500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.279661] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.280265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 699.280537] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c80aa85-293c-4441-9600-1b6cfa099131 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.286063] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 699.286063] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f0952-4867-4a01-7576-39d41498931f" [ 699.286063] env[68279]: _type = "Task" [ 699.286063] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.287464] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 699.287464] env[68279]: value = "task-2962674" [ 699.287464] env[68279]: _type = "Task" [ 699.287464] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.299603] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f0952-4867-4a01-7576-39d41498931f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.302701] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962674, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.420913] env[68279]: DEBUG oslo_concurrency.lockutils [None req-763a6d54-212e-494f-b204-74bb3f0e0511 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.630s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.523053] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.523053] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquired lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.523180] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.554766] env[68279]: DEBUG oslo_vmware.api [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Task: {'id': task-2962673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364208} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.554766] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.554766] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.554766] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.554766] env[68279]: INFO nova.compute.manager [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 699.555156] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.555156] env[68279]: DEBUG nova.compute.manager [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 699.555156] env[68279]: DEBUG nova.network.neutron [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.673591] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updated VIF entry in instance network info cache for port bfea9973-01ad-4d9c-a033-e6abdbcd8c3c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 699.673953] env[68279]: DEBUG nova.network.neutron [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updating instance_info_cache with network_info: [{"id": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "address": "fa:16:3e:18:e4:1b", "network": {"id": "52acc4ef-d1e2-42b3-ad40-a9225964adef", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-539857656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e105d6867a24b6fbc867a41adecf830", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea9973-01", "ovs_interfaceid": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.765694] env[68279]: DEBUG nova.scheduler.client.report [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.809990] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f0952-4867-4a01-7576-39d41498931f, 'name': SearchDatastore_Task, 'duration_secs': 0.020651} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.812399] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.812827] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.813193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.813468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.813779] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 699.814499] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962674, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.816026] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a60b903-373e-4c97-96f2-52d774df3813 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.829212] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 699.829212] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 699.829212] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42b26be-e3f3-4297-8e3d-267b817a3d5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.836620] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 699.836620] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6bdf2-f10c-3a2c-71d7-887f9518ab50" [ 699.836620] env[68279]: _type = "Task" [ 699.836620] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.846218] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6bdf2-f10c-3a2c-71d7-887f9518ab50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.926527] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.057269] env[68279]: DEBUG nova.compute.manager [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Received event network-vif-plugged-c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.057491] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Acquiring lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.057812] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.058188] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.058438] env[68279]: DEBUG nova.compute.manager [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] No waiting events found dispatching 
network-vif-plugged-c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 700.058605] env[68279]: WARNING nova.compute.manager [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Received unexpected event network-vif-plugged-c342af3f-5da3-465c-a8da-2b93c20697f7 for instance with vm_state building and task_state spawning. [ 700.058764] env[68279]: DEBUG nova.compute.manager [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Received event network-changed-c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.058913] env[68279]: DEBUG nova.compute.manager [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Refreshing instance network info cache due to event network-changed-c342af3f-5da3-465c-a8da-2b93c20697f7. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 700.059089] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Acquiring lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.084227] env[68279]: DEBUG nova.compute.manager [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.085166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b218d683-b29b-4dda-ab83-a604a42d6857 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.092985] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.177529] env[68279]: DEBUG oslo_concurrency.lockutils [req-febd4c47-1ddc-4160-aa6c-d74e3df77b9c req-ea67cd31-3cef-42df-b934-abca16e2c3ee service nova] Releasing lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.277206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.277206] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 700.279293] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.943s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.280755] env[68279]: INFO nova.compute.claims [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.308857] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962674, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.350021] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a6bdf2-f10c-3a2c-71d7-887f9518ab50, 'name': SearchDatastore_Task, 'duration_secs': 0.045886} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.353014] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67f76419-9da4-4f2e-ab81-925686b28185 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.357954] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 700.357954] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526cc301-d1b5-c83c-af41-b139dda9c8bd" [ 700.357954] env[68279]: _type = "Task" [ 700.357954] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.369687] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526cc301-d1b5-c83c-af41-b139dda9c8bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.450574] env[68279]: DEBUG nova.network.neutron [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updating instance_info_cache with network_info: [{"id": "c342af3f-5da3-465c-a8da-2b93c20697f7", "address": "fa:16:3e:de:ee:33", "network": {"id": "7c818ed5-5313-4f1e-8806-0ff14d2daa10", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1357119540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db718fbefd764ba09477778fefd4e34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc342af3f-5d", "ovs_interfaceid": "c342af3f-5da3-465c-a8da-2b93c20697f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.458584] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.607387] env[68279]: INFO nova.compute.manager [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] instance snapshotting [ 700.613250] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2512d3-cc3a-4201-8bb8-2ec3017e7890 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.641794] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f39e48-8415-4b50-89a1-a5bfece3dd29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.746092] env[68279]: DEBUG nova.network.neutron [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Updating instance_info_cache with network_info: [] {{(pid=68279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.780970] env[68279]: DEBUG nova.compute.utils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 700.783872] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.783872] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.812055] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962674, 'name': Rename_Task, 'duration_secs': 1.304083} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.812383] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.812942] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdbe3f39-21ed-4ae0-82fa-7ce582f1efab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.821421] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 700.821421] env[68279]: value = "task-2962675" [ 700.821421] env[68279]: _type = "Task" [ 700.821421] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.833112] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.837128] env[68279]: DEBUG nova.policy [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '928096fa57e3440abd3055ed158f567b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07ddf058e7d043439c7088865e742b6e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 700.870556] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526cc301-d1b5-c83c-af41-b139dda9c8bd, 'name': SearchDatastore_Task, 'duration_secs': 0.015117} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.871291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.871678] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e/b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 700.874073] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b66f6af-0083-45f2-a27a-9ace3ab39041 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.884709] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 700.884709] env[68279]: value = "task-2962676" [ 700.884709] env[68279]: _type = "Task" [ 700.884709] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.892700] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962676, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.957138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Releasing lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.957637] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Instance network_info: |[{"id": "c342af3f-5da3-465c-a8da-2b93c20697f7", "address": "fa:16:3e:de:ee:33", "network": {"id": "7c818ed5-5313-4f1e-8806-0ff14d2daa10", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1357119540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db718fbefd764ba09477778fefd4e34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc342af3f-5d", "ovs_interfaceid": "c342af3f-5da3-465c-a8da-2b93c20697f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 700.958392] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Acquired lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.958700] env[68279]: DEBUG nova.network.neutron [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Refreshing network info cache for port c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.960108] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:ee:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c342af3f-5da3-465c-a8da-2b93c20697f7', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.969333] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 
tempest-ServersV294TestFqdnHostnames-913658332-project-member] Creating folder: Project (db718fbefd764ba09477778fefd4e34d). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.970749] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a35782d-7d83-4d6b-949f-2255681fa6e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.986705] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Created folder: Project (db718fbefd764ba09477778fefd4e34d) in parent group-v594445. [ 700.986705] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Creating folder: Instances. Parent ref: group-v594531. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.986705] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c6888a4-5e17-4a08-9566-7be5b22a4a7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.000104] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Created folder: Instances in parent group-v594531. [ 701.000104] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.000104] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.000104] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c04f53b3-0504-407a-967b-27fbd361faef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.020223] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.020223] env[68279]: value = "task-2962679" [ 701.020223] env[68279]: _type = "Task" [ 701.020223] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.032394] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962679, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.156667] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 701.157147] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-40fdb704-48cc-45bc-ac41-ac30c4d1fc9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.169638] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 701.169638] env[68279]: value = "task-2962680" [ 701.169638] env[68279]: _type = "Task" [ 701.169638] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.180151] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962680, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.250437] env[68279]: INFO nova.compute.manager [-] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Took 1.69 seconds to deallocate network for instance. [ 701.291058] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 701.337408] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.398912] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962676, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.534331] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962679, 'name': CreateVM_Task, 'duration_secs': 0.473733} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.537402] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.538310] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.538569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.538963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 701.539266] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478b8288-2e95-4c70-8a13-16c687225861 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.546379] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 701.546379] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5207430d-9423-357b-19db-326674e03206" [ 701.546379] env[68279]: _type = "Task" [ 701.546379] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.558958] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5207430d-9423-357b-19db-326674e03206, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.623706] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Successfully created port: 152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.678834] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962680, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.758906] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.817079] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5a7dbc-72fe-477e-bf3f-693f1575680e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.829389] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fc0915-64bc-4f14-8db3-35c207c5fbc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.840686] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.866819] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cb42c0-30ce-4c56-8057-00924184289d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.875561] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5af0eb4-b1cf-4001-b4f1-2533e272148c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.892122] env[68279]: DEBUG nova.compute.provider_tree [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.902633] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74221} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.904050] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e/b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 701.904315] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 701.904591] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e0456f3-7ecb-4274-85c8-2d5eacdfd77b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.913292] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 701.913292] env[68279]: value = "task-2962681" [ 701.913292] env[68279]: _type = "Task" [ 701.913292] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.922273] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962681, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.059318] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5207430d-9423-357b-19db-326674e03206, 'name': SearchDatastore_Task, 'duration_secs': 0.020319} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.059658] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.060076] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.060146] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.060297] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.060473] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.060743] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c3b0948-60fe-43a7-b923-6e8408921a31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.072589] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.072844] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.074416] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6c359e0-c529-4fc3-850e-7a86ee4f7514 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.080459] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 702.080459] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8c6c-d790-7298-2b95-8768a0467d82" [ 702.080459] env[68279]: _type = "Task" [ 702.080459] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.090247] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8c6c-d790-7298-2b95-8768a0467d82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.180146] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962680, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.309520] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 702.340149] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.342345] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 702.342345] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.342345] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.342345] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.342345] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.342608] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 702.342736] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 702.343239] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 702.343239] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 702.343239] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 702.343412] env[68279]: DEBUG nova.virt.hardware [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 702.344234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cedc6ce-0824-4d9f-a234-5044edeef24e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.354368] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddd8188-411c-4955-b62d-df0e3c3c9b7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.399148] env[68279]: DEBUG nova.scheduler.client.report [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.427567] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962681, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.258784} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.427567] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.427567] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cda355c-8e61-424c-9684-c222610a1af8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.458228] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e/b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.458604] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ef34b1e-4a72-4f5c-b326-492a286e6062 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.480535] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 702.480535] env[68279]: value = "task-2962682" [ 702.480535] env[68279]: _type = "Task" [ 702.480535] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.491830] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.509967] env[68279]: DEBUG nova.network.neutron [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updated VIF entry in instance network info cache for port c342af3f-5da3-465c-a8da-2b93c20697f7. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 702.510415] env[68279]: DEBUG nova.network.neutron [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updating instance_info_cache with network_info: [{"id": "c342af3f-5da3-465c-a8da-2b93c20697f7", "address": "fa:16:3e:de:ee:33", "network": {"id": "7c818ed5-5313-4f1e-8806-0ff14d2daa10", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1357119540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db718fbefd764ba09477778fefd4e34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc342af3f-5d", "ovs_interfaceid": "c342af3f-5da3-465c-a8da-2b93c20697f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.596773] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8c6c-d790-7298-2b95-8768a0467d82, 'name': SearchDatastore_Task, 'duration_secs': 0.024988} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.599072] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12eb57ed-dd4f-4b0a-a49c-f129a95d5527 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.604789] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 702.604789] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214313a-11ae-314f-9c47-4f6ea072f4be" [ 702.604789] env[68279]: _type = "Task" [ 702.604789] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.615390] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214313a-11ae-314f-9c47-4f6ea072f4be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.681673] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962680, 'name': CreateSnapshot_Task, 'duration_secs': 1.420158} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.682148] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 702.682928] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ad773c-0559-4e95-a6e4-2cf3dacf38f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.840325] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.904078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.904078] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 702.910772] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 24.493s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.993440] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962682, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.015321] env[68279]: DEBUG oslo_concurrency.lockutils [req-da510634-9907-4324-b6fa-230c51c589c6 req-99b9024c-c239-408e-9165-30950f57f36a service nova] Releasing lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.122944] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214313a-11ae-314f-9c47-4f6ea072f4be, 'name': SearchDatastore_Task, 'duration_secs': 0.01531} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.123232] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.123489] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e/932663fb-ea20-48d2-b6e8-2d3b32bbdd8e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.123748] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e62c867b-72a9-427a-b988-f72dc96930d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.131924] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 703.131924] env[68279]: value = "task-2962683" [ 703.131924] env[68279]: _type = "Task" [ 703.131924] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.137506] env[68279]: DEBUG nova.compute.manager [req-2fbaea90-c4de-474f-b213-8dad1033cf79 req-d1a767bc-bea1-49fd-a545-42bbc3ed7a66 service nova] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Received event network-vif-deleted-2c75e839-da7d-4baa-85d0-ea0ad60abf2c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.145133] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962683, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.206489] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 703.207378] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-57febcbb-0ab0-4ad9-ba18-9c24c8337f33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.217331] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 703.217331] env[68279]: value = "task-2962684" [ 703.217331] env[68279]: _type = "Task" [ 703.217331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.225729] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962684, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.336284] env[68279]: DEBUG oslo_vmware.api [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962675, 'name': PowerOnVM_Task, 'duration_secs': 2.060685} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.336611] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 703.336801] env[68279]: INFO nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Took 11.15 seconds to spawn the instance on the hypervisor. 
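The CreateVM_Task, CreateSnapshot_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same oslo.vmware client pattern: the asynchronous vSphere method is invoked through the API session, a Task managed-object reference comes back, and wait_for_task drives the _poll_task loop whose "progress is N%" lines recur throughout this log until the task completes or fails. Below is a minimal sketch of that pattern, assuming placeholder vCenter credentials and a placeholder VM moref; it illustrates the client-side API only, not the Nova driver's own code path.

# Illustrative sketch of the oslo.vmware task pattern seen in the log above.
# Host, credentials and the VM id are placeholders, not values from this run.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'svc-user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Build a managed object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# Invoke the asynchronous vSphere method; it returns a Task moref.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls the task (the "progress is N%" lines) and returns
# its result on success, or raises on error/cancellation.
task_info = session.wait_for_task(task)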
[ 703.336986] env[68279]: DEBUG nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 703.337773] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021a11e-8d26-4d01-bc97-3e262c9537a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.391759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.391934] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.410355] env[68279]: DEBUG nova.compute.utils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.411908] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.412136] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.493290] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962682, 'name': ReconfigVM_Task, 'duration_secs': 0.53299} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.493628] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Reconfigured VM instance instance-0000001c to attach disk [datastore1] b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e/b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.494331] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ace51db-1798-4d11-b201-54ec5eb5956b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.506765] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 703.506765] env[68279]: value = "task-2962685" [ 703.506765] env[68279]: _type = "Task" [ 703.506765] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.514459] env[68279]: DEBUG nova.policy [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aec08ac14cb6415d82527dbf841df852', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a4b54f28bbb494eac219d3b81692ed1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.525379] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962685, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.651174] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962683, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.735575] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962684, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.863203] env[68279]: INFO nova.compute.manager [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Took 38.47 seconds to build instance. 
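The lock bookkeeping lines in this section ("Acquiring lock ...", "acquired ... waited 0.000s", '"released" ... held 2.624s') are emitted by oslo.concurrency's lockutils, which Nova uses both for the per-instance build locks and for the resource tracker's "compute_resources" lock. The following is a small sketch of the two usual entry points, the synchronized decorator and the lock context manager; the lock names and function bodies here are examples only, not Nova code.

# Example of the oslo.concurrency locking that produces the
# acquire/wait/hold lines above. Names and bodies are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources_example(instance_uuid):
    # Serialized per process; acquire, wait and hold durations are
    # logged at DEBUG, producing lines like those in this log.
    return instance_uuid

def refresh_cache_example(instance_uuid):
    # Context-manager form, as used for the refresh_cache-<uuid> locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass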
[ 703.922550] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 704.017895] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962685, 'name': Rename_Task, 'duration_secs': 0.321423} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.019055] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.019849] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a74bed-1d7c-446c-9586-169e5e66265b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.022350] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dfe04f4-594c-4e96-a3a9-b2ab35325161 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.030248] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b79c0e1-89e9-4c12-a3ff-34b7d5b95e51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.035388] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 704.035388] env[68279]: value = "task-2962686" [ 704.035388] env[68279]: _type = "Task" [ 704.035388] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.073416] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3a4d8a-40be-42dd-9877-59d34d83dc9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.080648] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962686, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.087415] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288bac90-a7b3-46ce-80c8-ca56921303f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.107021] env[68279]: DEBUG nova.compute.provider_tree [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.146121] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773142} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.146700] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e/932663fb-ea20-48d2-b6e8-2d3b32bbdd8e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.147025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.147025] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cfb321a-6b35-4418-941b-065d60b665d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.156038] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 704.156038] env[68279]: value = "task-2962687" [ 704.156038] env[68279]: _type = "Task" [ 704.156038] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.166516] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962687, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.233235] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962684, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.350141] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Successfully created port: 8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.367853] env[68279]: DEBUG oslo_concurrency.lockutils [None req-406c18c4-3705-4499-8818-6e3a51d4be0c tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.647s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.529039] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Successfully updated port: 152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 704.550028] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962686, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.613021] env[68279]: DEBUG nova.scheduler.client.report [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.623450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.623450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.623783] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.624254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.625142] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.632352] env[68279]: INFO nova.compute.manager [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Terminating instance [ 704.670162] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081725} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.670162] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 704.670162] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2171b308-e1fe-4bb2-8770-0646e87f40a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.700254] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e/932663fb-ea20-48d2-b6e8-2d3b32bbdd8e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 704.700254] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85e49764-e826-442a-9174-1b5005986d15 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.728416] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 704.728416] env[68279]: value = "task-2962688" [ 704.728416] env[68279]: _type = "Task" [ 704.728416] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.735790] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962684, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.742678] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962688, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.871820] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 704.933835] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.966852] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.967263] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.967328] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.967507] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.967656] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.967811] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.968137] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.968193] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.968429] 
env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.969392] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.969392] env[68279]: DEBUG nova.virt.hardware [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.970358] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c80579a-383c-48fe-9bf8-9755b722bafa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.981598] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a015b154-73f1-4600-b900-4f9267029291 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.033103] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.033270] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.033424] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.047095] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962686, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.139528] env[68279]: DEBUG nova.compute.manager [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 705.139759] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.141677] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08c3374-2184-437e-b237-6b4283a80b1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.153033] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 705.153033] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89aea76c-83cb-4e84-9dde-e054738366cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.163755] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 705.163755] env[68279]: value = "task-2962689" [ 705.163755] env[68279]: _type = "Task" [ 705.163755] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.174292] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.242521] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962684, 'name': CloneVM_Task, 'duration_secs': 1.707645} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.243224] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Created linked-clone VM from snapshot [ 705.244017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5db76b-438b-4bc7-8e23-4436f63d2dee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.252283] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962688, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.258210] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Uploading image f99b615c-d0f5-4c83-90d8-a6ea39213bc6 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 705.275344] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 705.277473] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4e91174c-28ea-45ce-a6cb-252409b69969 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.287562] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 705.287562] env[68279]: value = "task-2962690" [ 705.287562] env[68279]: _type = "Task" [ 705.287562] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.299363] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962690, 'name': Destroy_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.401872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.504327] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.504659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.551093] env[68279]: DEBUG oslo_vmware.api [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2962686, 'name': PowerOnVM_Task, 'duration_secs': 1.020085} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.551093] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.551093] env[68279]: INFO nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Took 10.78 seconds to spawn the instance on the hypervisor. [ 705.551093] env[68279]: DEBUG nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.551093] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67eed77-db7a-4dd6-90f7-ba16254cce95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.572530] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.631850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.722s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.635215] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.982s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.636792] env[68279]: INFO nova.compute.claims [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.677291] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962689, 'name': PowerOffVM_Task, 'duration_secs': 0.357995} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.677291] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 705.677291] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.677291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6484c86-e047-4605-8063-751f2371278d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.740025] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962688, 'name': ReconfigVM_Task, 'duration_secs': 0.607759} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.740342] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e/932663fb-ea20-48d2-b6e8-2d3b32bbdd8e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.740966] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-502324e3-82d7-46a6-9f25-662c59eb4ff7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.748926] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 705.748926] env[68279]: value = "task-2962692" [ 705.748926] env[68279]: _type = "Task" [ 705.748926] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.758249] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962692, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.798242] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962690, 'name': Destroy_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.813173] env[68279]: DEBUG nova.compute.manager [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-vif-plugged-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.813326] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Acquiring lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.813651] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.813750] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.813940] env[68279]: DEBUG nova.compute.manager [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] No waiting events found dispatching network-vif-plugged-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 705.814349] env[68279]: WARNING nova.compute.manager [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received unexpected event network-vif-plugged-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 for instance with vm_state building and task_state spawning. [ 705.814544] env[68279]: DEBUG nova.compute.manager [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.814763] env[68279]: DEBUG nova.compute.manager [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing instance network info cache due to event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 705.814866] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.838035] env[68279]: DEBUG nova.network.neutron [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.071569] env[68279]: INFO nova.compute.manager [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Took 39.80 seconds to build instance. [ 706.214945] env[68279]: INFO nova.scheduler.client.report [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleted allocation for migration 8c57a7eb-23a4-49a8-bb18-9efe945bfe06 [ 706.265942] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962692, 'name': Rename_Task, 'duration_secs': 0.158851} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.265942] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.265942] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30ebaef4-3227-483d-bbd7-216dc406fb34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.272450] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 706.272450] env[68279]: value = "task-2962693" [ 706.272450] env[68279]: _type = "Task" [ 706.272450] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.284181] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.301553] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962690, 'name': Destroy_Task, 'duration_secs': 0.559147} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.302405] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Destroyed the VM [ 706.302405] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 706.302617] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9aded57b-4371-495c-bc00-0fd6ca835931 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.310179] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 706.310179] env[68279]: value = "task-2962694" [ 706.310179] env[68279]: _type = "Task" [ 706.310179] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.321077] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962694, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.340624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.341078] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Instance network_info: |[{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 706.341325] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.341506] env[68279]: DEBUG nova.network.neutron [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.342812] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:6f:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '152b3aa4-9e41-4813-87ce-2c7cfd51fae1', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.351637] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Creating folder: Project (07ddf058e7d043439c7088865e742b6e). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.352283] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8faa2b03-9e9a-4f91-b6f7-714589bbf426 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.366466] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Created folder: Project (07ddf058e7d043439c7088865e742b6e) in parent group-v594445. [ 706.366677] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Creating folder: Instances. Parent ref: group-v594536. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.366960] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c9a3d0b-3f92-4437-8b8e-648f647da2ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.380812] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Created folder: Instances in parent group-v594536. [ 706.380812] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.380812] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.380812] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e36da7ac-952a-4de1-8e4b-7011e76344ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.408085] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.408085] env[68279]: value = "task-2962697" [ 706.408085] env[68279]: _type = "Task" [ 706.408085] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.417391] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962697, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.450028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 706.450028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 706.450028] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Deleting the datastore file [datastore1] 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 706.450028] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5426a62a-02f6-453a-abd0-19faa143c6ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.459620] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for the task: (returnval){ [ 706.459620] env[68279]: value = "task-2962698" [ 706.459620] env[68279]: _type = "Task" [ 706.459620] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.472173] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962698, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.575130] env[68279]: DEBUG oslo_concurrency.lockutils [None req-025bdb32-b740-4644-95a4-db6033b2a68a tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.425s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.611499] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Successfully updated port: 8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.720928] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80cb09b1-29ea-4a66-8cdf-d6c15ea8a8a1 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 31.362s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.747598] env[68279]: INFO nova.compute.manager [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Rescuing [ 706.747861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.748052] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.748287] env[68279]: DEBUG nova.network.neutron [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.788284] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962693, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.823185] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962694, 'name': RemoveSnapshot_Task} progress is 54%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.921551] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962697, 'name': CreateVM_Task, 'duration_secs': 0.364506} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.921832] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.922887] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.923543] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.923989] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.924275] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f493f214-5121-4ca6-ba4f-a022082e6dfc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.933930] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 706.933930] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b602f-1552-f1b6-f788-c7b9bd505c17" [ 706.933930] env[68279]: _type = "Task" [ 706.933930] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.944468] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b602f-1552-f1b6-f788-c7b9bd505c17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.968717] env[68279]: DEBUG oslo_vmware.api [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Task: {'id': task-2962698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184786} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.971357] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.971549] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.971730] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.971896] env[68279]: INFO nova.compute.manager [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Took 1.83 seconds to destroy the instance on the hypervisor. [ 706.972153] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.972523] env[68279]: DEBUG nova.compute.manager [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.972621] env[68279]: DEBUG nova.network.neutron [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.079020] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.118459] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.118459] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquired lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.118757] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.157694] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f491e7e-1f58-49ac-908d-6765ceba72bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.165865] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20a6620-a9cf-4001-b0ff-d33a8b7dc2ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.198414] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474529a3-f10f-49ef-94c7-1fbcf429aa6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.208266] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cada5752-941c-4e42-bd6a-5250a38e7f40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.223200] env[68279]: DEBUG nova.compute.provider_tree [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.282985] env[68279]: DEBUG oslo_vmware.api [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2962693, 'name': PowerOnVM_Task, 'duration_secs': 0.515348} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.283265] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.283462] env[68279]: INFO nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Took 9.80 seconds to spawn the instance on the hypervisor. [ 707.283636] env[68279]: DEBUG nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.284415] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f6c22d-b817-4ba4-be30-348506abf4e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.321410] env[68279]: DEBUG oslo_vmware.api [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962694, 'name': RemoveSnapshot_Task, 'duration_secs': 0.955102} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.324884] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 707.369300] env[68279]: DEBUG nova.network.neutron [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updated VIF entry in instance network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.369690] env[68279]: DEBUG nova.network.neutron [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.462019] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b602f-1552-f1b6-f788-c7b9bd505c17, 'name': SearchDatastore_Task, 'duration_secs': 0.012434} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.462019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.462019] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.462019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.462019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.462019] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.462019] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1796ca0-246f-4283-9e41-c297a2bc6ce7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.474768] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.474910] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.475816] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386dfbfa-f8c5-452a-95f4-fc0d52953dd8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.483037] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 707.483037] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277ba6b-b54c-31ff-7f6d-2181b10e0235" [ 707.483037] env[68279]: _type = "Task" [ 707.483037] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.492011] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277ba6b-b54c-31ff-7f6d-2181b10e0235, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.581449] env[68279]: DEBUG nova.network.neutron [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.607902] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.670288] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: 
f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.728650] env[68279]: DEBUG nova.scheduler.client.report [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.804737] env[68279]: INFO nova.compute.manager [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Took 38.44 seconds to build instance. [ 707.831023] env[68279]: WARNING nova.compute.manager [None req-e519d755-d582-4322-a468-0e85e291b09c tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Image not found during snapshot: nova.exception.ImageNotFound: Image f99b615c-d0f5-4c83-90d8-a6ea39213bc6 could not be found. [ 707.874277] env[68279]: DEBUG oslo_concurrency.lockutils [req-ca2576dd-a11a-46f4-b754-0a37b0db976b req-69e982b7-7de0-4472-a6a5-fb664c1d2b39 service nova] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.895499] env[68279]: DEBUG nova.network.neutron [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Updating instance_info_cache with network_info: [{"id": "8023cef8-1786-40b1-a2f3-0692fb595915", "address": "fa:16:3e:a8:1f:7c", "network": {"id": "852f6e1f-ecd8-4c6d-aa3d-1f5ce5fbf5bf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1810846394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a4b54f28bbb494eac219d3b81692ed1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8023cef8-17", "ovs_interfaceid": "8023cef8-1786-40b1-a2f3-0692fb595915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
707.994831] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277ba6b-b54c-31ff-7f6d-2181b10e0235, 'name': SearchDatastore_Task, 'duration_secs': 0.016937} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.995699] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b89d041-8508-42aa-9237-9b7fcc18b4b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.001899] env[68279]: DEBUG nova.network.neutron [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.003220] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 708.003220] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5298d30c-cde3-2179-e424-d371c652d687" [ 708.003220] env[68279]: _type = "Task" [ 708.003220] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.014053] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5298d30c-cde3-2179-e424-d371c652d687, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.085819] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.151171] env[68279]: DEBUG nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Received event network-vif-plugged-8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.151681] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Acquiring lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.151839] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.152067] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.152250] env[68279]: DEBUG nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] No waiting events found dispatching network-vif-plugged-8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 708.152420] env[68279]: WARNING nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Received unexpected event network-vif-plugged-8023cef8-1786-40b1-a2f3-0692fb595915 for instance with vm_state building and task_state spawning. [ 708.152809] env[68279]: DEBUG nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Received event network-changed-8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.153100] env[68279]: DEBUG nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Refreshing instance network info cache due to event network-changed-8023cef8-1786-40b1-a2f3-0692fb595915. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 708.153226] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Acquiring lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.237924] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.238448] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 708.242134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.282s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.242364] env[68279]: DEBUG nova.objects.instance [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lazy-loading 'resources' on Instance uuid deea2dea-1860-45a0-9637-ced09bb51b81 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.306999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed35af25-f670-4527-a65b-566571906a81 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 57.008s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.322718] env[68279]: DEBUG nova.compute.manager [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Received event network-changed-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.322916] env[68279]: DEBUG nova.compute.manager [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Refreshing instance network info cache due to event network-changed-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 708.323213] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] Acquiring lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.323300] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] Acquired lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.323430] env[68279]: DEBUG nova.network.neutron [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Refreshing network info cache for port bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.398042] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Releasing lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.398430] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Instance network_info: |[{"id": "8023cef8-1786-40b1-a2f3-0692fb595915", "address": "fa:16:3e:a8:1f:7c", "network": {"id": "852f6e1f-ecd8-4c6d-aa3d-1f5ce5fbf5bf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1810846394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a4b54f28bbb494eac219d3b81692ed1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8023cef8-17", "ovs_interfaceid": "8023cef8-1786-40b1-a2f3-0692fb595915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 708.398737] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Acquired lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.398918] env[68279]: DEBUG nova.network.neutron [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: 
f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Refreshing network info cache for port 8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.400139] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:1f:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be5c038c-29e5-43c9-91ab-9eb3094b5337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8023cef8-1786-40b1-a2f3-0692fb595915', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.408092] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Creating folder: Project (5a4b54f28bbb494eac219d3b81692ed1). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.408930] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efec8460-7671-4e77-908f-6a8dc22bbcc0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.423257] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Created folder: Project (5a4b54f28bbb494eac219d3b81692ed1) in parent group-v594445. [ 708.423257] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Creating folder: Instances. Parent ref: group-v594539. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.423370] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74409cf8-f0e7-4667-9f20-3a4e8c2c36a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.435072] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Created folder: Instances in parent group-v594539. [ 708.435309] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 708.435522] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.435762] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31f376a4-584f-43ae-8e2a-70e5a87e8db5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.456281] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.456281] env[68279]: value = "task-2962701" [ 708.456281] env[68279]: _type = "Task" [ 708.456281] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.466445] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962701, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.510127] env[68279]: INFO nova.compute.manager [-] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Took 1.54 seconds to deallocate network for instance. [ 708.519317] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5298d30c-cde3-2179-e424-d371c652d687, 'name': SearchDatastore_Task, 'duration_secs': 0.015786} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.519942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.520777] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f7db383a-648a-4984-ae25-72bc2ccfe369/f7db383a-648a-4984-ae25-72bc2ccfe369.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.520889] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed7e1ff8-ec49-4678-90a3-326886d881d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.531797] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 708.531797] env[68279]: value = "task-2962702" [ 708.531797] env[68279]: _type = "Task" [ 708.531797] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.545225] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962702, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.748623] env[68279]: DEBUG nova.compute.utils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 708.750370] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 708.754447] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 708.811614] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.855134] env[68279]: DEBUG nova.policy [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6033843a67f742c59b63c9e972e5b456', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '144a0c8fec0d452fa465a921e9128d37', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 708.978864] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962701, 'name': CreateVM_Task, 'duration_secs': 0.374774} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.979071] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 708.980301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.980502] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.980901] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 708.981881] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eff306b-9327-4a65-b0bc-657c3a13a869 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.994923] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 708.994923] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528d9086-ad55-f29c-2b3f-2e467396b94f" [ 708.994923] env[68279]: _type = "Task" [ 708.994923] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.014485] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528d9086-ad55-f29c-2b3f-2e467396b94f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.021710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.048119] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962702, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.261408] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 709.356291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.368395] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81feefbe-d4d3-46c4-a6d9-e1f754deee11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.377872] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a456bd-9053-4624-b2ed-071363ba6831 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.415044] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00c3542-e235-47f5-9c6b-e207c65be3b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.427118] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ac960c-6e6f-4038-8917-45acb78a8182 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.442689] env[68279]: DEBUG nova.compute.provider_tree [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.496728] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "bf4e6484-d17d-4244-9163-1ef0012874b8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.497008] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.497483] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 
tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.497686] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.497860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.503841] env[68279]: INFO nova.compute.manager [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Terminating instance [ 709.514982] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528d9086-ad55-f29c-2b3f-2e467396b94f, 'name': SearchDatastore_Task, 'duration_secs': 0.059215} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.516389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.516651] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.516886] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.517047] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.517225] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.517731] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5888f7d1-1e60-4371-bd60-5dd37c71c0f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.528873] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.529110] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.529868] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b19011-45b7-4229-ae8d-ca824df9db08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.543036] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 709.543036] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cef0d8-9ff3-cd15-09b2-9e00b33bc5a6" [ 709.543036] env[68279]: _type = "Task" [ 709.543036] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.548082] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681981} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.552027] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f7db383a-648a-4984-ae25-72bc2ccfe369/f7db383a-648a-4984-ae25-72bc2ccfe369.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.552027] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.552027] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fecc582-89ce-4a72-9b02-b579eeeaf67d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.555799] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cef0d8-9ff3-cd15-09b2-9e00b33bc5a6, 'name': SearchDatastore_Task, 'duration_secs': 0.016266} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.556871] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef0cd168-9c0e-4955-b91c-272b0d35b851 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.562771] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 709.562771] env[68279]: value = "task-2962703" [ 709.562771] env[68279]: _type = "Task" [ 709.562771] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.564185] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 709.564185] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526ff7a6-4c24-b997-812d-6a0d901825ff" [ 709.564185] env[68279]: _type = "Task" [ 709.564185] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.577238] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962703, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.580721] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526ff7a6-4c24-b997-812d-6a0d901825ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.593857] env[68279]: DEBUG nova.network.neutron [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Updated VIF entry in instance network info cache for port 8023cef8-1786-40b1-a2f3-0692fb595915. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.593857] env[68279]: DEBUG nova.network.neutron [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Updating instance_info_cache with network_info: [{"id": "8023cef8-1786-40b1-a2f3-0692fb595915", "address": "fa:16:3e:a8:1f:7c", "network": {"id": "852f6e1f-ecd8-4c6d-aa3d-1f5ce5fbf5bf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1810846394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a4b54f28bbb494eac219d3b81692ed1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8023cef8-17", "ovs_interfaceid": "8023cef8-1786-40b1-a2f3-0692fb595915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.601055] env[68279]: DEBUG nova.network.neutron [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updated VIF entry in instance network info cache for port bfea9973-01ad-4d9c-a033-e6abdbcd8c3c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.601226] env[68279]: DEBUG nova.network.neutron [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updating instance_info_cache with network_info: [{"id": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "address": "fa:16:3e:18:e4:1b", "network": {"id": "52acc4ef-d1e2-42b3-ad40-a9225964adef", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-539857656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e105d6867a24b6fbc867a41adecf830", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea9973-01", "ovs_interfaceid": "bfea9973-01ad-4d9c-a033-e6abdbcd8c3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.627763] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 709.628056] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86b8ffab-6498-4c3b-9e32-fd7c962ee829 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.635683] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 709.635683] env[68279]: value = "task-2962704" [ 709.635683] env[68279]: _type = "Task" [ 709.635683] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.651336] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962704, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.713114] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Successfully created port: ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.946819] env[68279]: DEBUG nova.scheduler.client.report [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.008541] env[68279]: DEBUG nova.compute.manager [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 710.008796] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.009824] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5b6013-2c6a-4dee-ac0f-f3adc9d2bc5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.024107] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 710.024586] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7774338d-db02-4a4f-88c4-d5984622ac1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.032653] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 710.032653] env[68279]: value = "task-2962705" [ 710.032653] env[68279]: _type = "Task" [ 710.032653] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.048458] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.084772] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071064} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.088661] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.089077] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526ff7a6-4c24-b997-812d-6a0d901825ff, 'name': SearchDatastore_Task, 'duration_secs': 0.016567} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.092933] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a5ba95-0da6-49b7-80de-da61e30b3300 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.093210] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.093354] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f927c34a-f155-4a1f-8151-b16a3cb3e9a1/f927c34a-f155-4a1f-8151-b16a3cb3e9a1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.093956] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3cdbd76-246a-4844-9624-bf113b1d39db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.109970] env[68279]: DEBUG oslo_concurrency.lockutils [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] Releasing lock "refresh_cache-f927c34a-f155-4a1f-8151-b16a3cb3e9a1" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.114020] env[68279]: DEBUG nova.compute.manager [req-98aacce1-ce60-44aa-858f-9df28a6de038 req-a4939350-d83c-4bf5-a35c-3596d8e9f126 service nova] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Received event network-vif-deleted-87a60716-63ed-4918-b9f9-0ae21f3aa5d9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 710.114020] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b31aefe-c2de-4d55-8775-bcaa14dc432d req-053abf31-3527-4f63-af34-45faa7c03479 service nova] Releasing lock "refresh_cache-b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.120789] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] f7db383a-648a-4984-ae25-72bc2ccfe369/f7db383a-648a-4984-ae25-72bc2ccfe369.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.122962] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af3565ad-2ba1-4c49-96bf-485d00e6c61a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.148378] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 710.148378] env[68279]: value = "task-2962706" [ 710.148378] env[68279]: _type = "Task" [ 710.148378] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.161380] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 710.161380] env[68279]: value = "task-2962707" [ 710.161380] env[68279]: _type = "Task" [ 710.161380] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.165196] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962704, 'name': PowerOffVM_Task, 'duration_secs': 0.519591} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.171667] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.172034] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.172768] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecda62ae-000e-4dfb-8fcc-3ba76c435201 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.183460] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.197726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bcc9d6-a60b-496d-b9ab-5bbd10fb5526 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.232449] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 710.232748] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4a6e52f-e9ab-4c0f-93dc-c2b3d148fd09 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.241036] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 710.241036] env[68279]: value = "task-2962708" [ 710.241036] env[68279]: _type = "Task" [ 710.241036] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.250130] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.271309] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.294560] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.294891] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.295759] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.295759] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.295759] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 710.295759] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.295942] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 
tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.296070] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.296249] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.296505] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.296689] env[68279]: DEBUG nova.virt.hardware [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.297540] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e383d85-bf14-4d76-94cc-3149073bc20e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.306450] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0888a4-5bd4-4c1e-b2b1-23db651c8035 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.455296] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.211s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.456484] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.884s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.460114] env[68279]: INFO nova.compute.claims [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.484450] env[68279]: INFO nova.scheduler.client.report [None 
req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Deleted allocations for instance deea2dea-1860-45a0-9637-ced09bb51b81 [ 710.549470] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962705, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.668404] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962706, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.683474] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.753651] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 710.753846] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.754129] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.754291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.754568] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.754740] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f56d5089-e40d-40bc-8941-074fdf427703 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.770189] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.770426] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.771649] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-461ac9b2-26b7-43f4-9744-8d7db315de89 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.781357] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 710.781357] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f1f639-aacd-0afe-71b1-925475fee178" [ 710.781357] env[68279]: _type = "Task" [ 710.781357] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.793383] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f1f639-aacd-0afe-71b1-925475fee178, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.829287] env[68279]: DEBUG nova.compute.manager [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Received event network-changed-c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 710.829553] env[68279]: DEBUG nova.compute.manager [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Refreshing instance network info cache due to event network-changed-c342af3f-5da3-465c-a8da-2b93c20697f7. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 710.829794] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] Acquiring lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.830059] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] Acquired lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.831567] env[68279]: DEBUG nova.network.neutron [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Refreshing network info cache for port c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.995404] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6c53cff4-5c30-449e-9900-bdfe6eb40386 tempest-AttachInterfacesV270Test-1100984561 tempest-AttachInterfacesV270Test-1100984561-project-member] Lock "deea2dea-1860-45a0-9637-ced09bb51b81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.758s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.043221] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962705, 'name': PowerOffVM_Task, 'duration_secs': 0.541901} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.043502] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 711.043674] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 711.043933] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f48336a7-92c8-41ce-acdf-e3d3fd1e61e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.122907] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 711.123240] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 711.123430] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleting the datastore file [datastore1] bf4e6484-d17d-4244-9163-1ef0012874b8 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 711.123725] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3b1534b-a923-4fd1-8a8e-2b6a0cc4349f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.131311] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 711.131311] env[68279]: value = "task-2962710" [ 711.131311] env[68279]: _type = "Task" [ 711.131311] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.139880] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962710, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.160824] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853558} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.161191] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f927c34a-f155-4a1f-8151-b16a3cb3e9a1/f927c34a-f155-4a1f-8151-b16a3cb3e9a1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.161431] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.162322] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f06c3880-c0d3-4274-85f6-d4ca1630754d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.170614] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 711.170614] env[68279]: value = "task-2962711" [ 711.170614] env[68279]: _type = "Task" [ 711.170614] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.182111] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962711, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.185473] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962707, 'name': ReconfigVM_Task, 'duration_secs': 0.926694} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.185805] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfigured VM instance instance-0000001e to attach disk [datastore2] f7db383a-648a-4984-ae25-72bc2ccfe369/f7db383a-648a-4984-ae25-72bc2ccfe369.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.186524] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd9f3b56-e293-411d-80ea-59232a8aef7f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.195022] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 711.195022] env[68279]: value = "task-2962712" [ 711.195022] env[68279]: _type = "Task" [ 711.195022] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.206837] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962712, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.297528] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f1f639-aacd-0afe-71b1-925475fee178, 'name': SearchDatastore_Task, 'duration_secs': 0.025431} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.298093] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd438dac-33ba-4477-be3c-3fc54027ae05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.307280] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 711.307280] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b022-66cd-0d2d-f5af-2236f1a60f3f" [ 711.307280] env[68279]: _type = "Task" [ 711.307280] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.315635] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b022-66cd-0d2d-f5af-2236f1a60f3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.626058] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Successfully updated port: ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 711.646972] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.690333] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078731} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.690654] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.691652] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2d020b-77b4-472f-a567-5bc7c56def38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.709599] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962712, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.732657] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] f927c34a-f155-4a1f-8151-b16a3cb3e9a1/f927c34a-f155-4a1f-8151-b16a3cb3e9a1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.733294] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fde50c7-9ff6-4c91-8f12-376f96a06a4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.758631] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 711.758631] env[68279]: value = "task-2962713" [ 711.758631] env[68279]: _type = "Task" [ 711.758631] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.767768] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962713, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.816962] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b022-66cd-0d2d-f5af-2236f1a60f3f, 'name': SearchDatastore_Task, 'duration_secs': 0.088485} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.820364] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.820628] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. {{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 711.821861] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b1a383d-45e1-4aab-b3ec-ed5b577a427f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.828374] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 711.828374] env[68279]: value = "task-2962714" [ 711.828374] env[68279]: _type = "Task" [ 711.828374] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.838681] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.871165] env[68279]: DEBUG nova.network.neutron [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updated VIF entry in instance network info cache for port c342af3f-5da3-465c-a8da-2b93c20697f7. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 711.871531] env[68279]: DEBUG nova.network.neutron [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updating instance_info_cache with network_info: [{"id": "c342af3f-5da3-465c-a8da-2b93c20697f7", "address": "fa:16:3e:de:ee:33", "network": {"id": "7c818ed5-5313-4f1e-8806-0ff14d2daa10", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1357119540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db718fbefd764ba09477778fefd4e34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc342af3f-5d", "ovs_interfaceid": "c342af3f-5da3-465c-a8da-2b93c20697f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.080400] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2277078-a875-4768-b1fd-b3010fc2608e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.089877] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905de32d-90f8-4a48-9e2a-4696e4f6ab87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.130520] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1bfe69-3a64-419a-8e0d-189b4908d7f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.139290] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.140114] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquired lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.140114] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 
tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.146898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0ddaa3-4e7d-4f86-bdee-12ba500fc491 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.159859] env[68279]: DEBUG oslo_vmware.api [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2962710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.677342} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.161073] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 712.161073] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 712.161312] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.161584] env[68279]: INFO nova.compute.manager [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Took 2.15 seconds to destroy the instance on the hypervisor. [ 712.161853] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.162424] env[68279]: DEBUG nova.compute.manager [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 712.162561] env[68279]: DEBUG nova.network.neutron [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.174854] env[68279]: DEBUG nova.compute.provider_tree [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.212087] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962712, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.276705] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962713, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.340922] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962714, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.380936] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d989a49-ef69-4452-9cb7-5fbbc43051eb req-938f8867-5e7c-409c-bc87-0cd84f28ac19 service nova] Releasing lock "refresh_cache-932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.685156] env[68279]: DEBUG nova.scheduler.client.report [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.689493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "665d932d-1068-4bb2-835c-2184a80753d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.689714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.709071] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.717845] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962712, 'name': Rename_Task, 'duration_secs': 1.057151} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.718146] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.718419] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e01e79c3-394e-4c4e-8c4a-333ce8f689af {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.728649] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 712.728649] env[68279]: value = "task-2962715" [ 712.728649] env[68279]: _type = "Task" [ 712.728649] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.738924] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.771460] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962713, 'name': ReconfigVM_Task, 'duration_secs': 0.670342} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.771750] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Reconfigured VM instance instance-0000001f to attach disk [datastore2] f927c34a-f155-4a1f-8151-b16a3cb3e9a1/f927c34a-f155-4a1f-8151-b16a3cb3e9a1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.772735] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ddc51e8-0dcb-4334-b2af-49d213d97528 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.780667] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 712.780667] env[68279]: value = "task-2962716" [ 712.780667] env[68279]: _type = "Task" [ 712.780667] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.790827] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962716, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.849221] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.849527] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. [ 712.850684] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20af031e-e3a9-4b9f-9026-f288894b9f3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.878109] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.878469] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d54cf1d-7671-476a-8bc8-f7c18c384f75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.899248] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 712.899248] env[68279]: value = "task-2962717" [ 712.899248] env[68279]: _type = "Task" [ 712.899248] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.911766] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962717, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.946929] env[68279]: DEBUG nova.network.neutron [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [{"id": "ba399180-a9aa-4428-8f5a-2ca45969e646", "address": "fa:16:3e:81:ed:d2", "network": {"id": "5b08982e-5c8f-43fe-9c87-01afa1634a99", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1752329142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "144a0c8fec0d452fa465a921e9128d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba399180-a9", "ovs_interfaceid": "ba399180-a9aa-4428-8f5a-2ca45969e646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.972366] env[68279]: DEBUG nova.compute.manager [req-faee7a73-f9b3-45b2-bcb8-fb6dd489adf9 req-8c207b73-8a32-40ee-a988-ec658a01c0fb service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Received event network-vif-deleted-4da65432-061e-4e08-a5b9-cb90b33ffc25 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.972366] env[68279]: INFO nova.compute.manager [req-faee7a73-f9b3-45b2-bcb8-fb6dd489adf9 req-8c207b73-8a32-40ee-a988-ec658a01c0fb service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Neutron deleted interface 4da65432-061e-4e08-a5b9-cb90b33ffc25; detaching it from the instance and deleting it from the info cache [ 712.972366] env[68279]: DEBUG nova.network.neutron [req-faee7a73-f9b3-45b2-bcb8-fb6dd489adf9 req-8c207b73-8a32-40ee-a988-ec658a01c0fb service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.994761] env[68279]: DEBUG nova.compute.manager [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received event network-vif-plugged-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.994761] env[68279]: DEBUG oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Acquiring lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.994761] env[68279]: DEBUG 
oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.995098] env[68279]: DEBUG oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.995161] env[68279]: DEBUG nova.compute.manager [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] No waiting events found dispatching network-vif-plugged-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 712.995281] env[68279]: WARNING nova.compute.manager [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received unexpected event network-vif-plugged-ba399180-a9aa-4428-8f5a-2ca45969e646 for instance with vm_state building and task_state spawning. [ 712.995444] env[68279]: DEBUG nova.compute.manager [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.996514] env[68279]: DEBUG nova.compute.manager [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing instance network info cache due to event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 712.996514] env[68279]: DEBUG oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Acquiring lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.194317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.194317] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.196980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.096s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.198403] env[68279]: DEBUG nova.objects.instance [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lazy-loading 'resources' on Instance uuid 11c439ab-e27c-43e6-b752-c90af5f84bc1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 713.246464] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962715, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.291821] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962716, 'name': Rename_Task, 'duration_secs': 0.205753} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.292161] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.292425] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23e2258d-27d4-4fc8-83a1-f7b8f3004435 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.300609] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 713.300609] env[68279]: value = "task-2962718" [ 713.300609] env[68279]: _type = "Task" [ 713.300609] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.310819] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962718, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.312495] env[68279]: DEBUG nova.network.neutron [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.410329] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.451195] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Releasing lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.451814] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Instance network_info: |[{"id": "ba399180-a9aa-4428-8f5a-2ca45969e646", "address": "fa:16:3e:81:ed:d2", "network": {"id": "5b08982e-5c8f-43fe-9c87-01afa1634a99", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1752329142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "144a0c8fec0d452fa465a921e9128d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba399180-a9", "ovs_interfaceid": "ba399180-a9aa-4428-8f5a-2ca45969e646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 713.452037] env[68279]: DEBUG oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Acquired lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.452262] env[68279]: DEBUG nova.network.neutron [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.454903] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 
tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:ed:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba399180-a9aa-4428-8f5a-2ca45969e646', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 713.463489] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Creating folder: Project (144a0c8fec0d452fa465a921e9128d37). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.464626] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a481a144-61cc-4085-bd65-1b88fa983f9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.477314] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4de1460-dee0-457d-94cc-966bf8fb9711 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.480181] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Created folder: Project (144a0c8fec0d452fa465a921e9128d37) in parent group-v594445. [ 713.480181] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Creating folder: Instances. Parent ref: group-v594542. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.480181] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96be60aa-dd27-4cca-b7f5-912ac02cefec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.488905] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f905768-a1ea-4131-86a8-29a0965b2a2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.501122] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Created folder: Instances in parent group-v594542. [ 713.501379] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 713.501956] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 713.502192] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c892c442-d63e-47b7-8cbb-3d8a156ed22f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.530785] env[68279]: DEBUG nova.compute.manager [req-faee7a73-f9b3-45b2-bcb8-fb6dd489adf9 req-8c207b73-8a32-40ee-a988-ec658a01c0fb service nova] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Detach interface failed, port_id=4da65432-061e-4e08-a5b9-cb90b33ffc25, reason: Instance bf4e6484-d17d-4244-9163-1ef0012874b8 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 713.536801] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 713.536801] env[68279]: value = "task-2962721" [ 713.536801] env[68279]: _type = "Task" [ 713.536801] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.547237] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962721, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.706834] env[68279]: DEBUG nova.compute.utils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 713.708505] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.708693] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.743708] env[68279]: DEBUG oslo_vmware.api [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2962715, 'name': PowerOnVM_Task, 'duration_secs': 0.536081} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.743888] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.743930] env[68279]: INFO nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Took 11.43 seconds to spawn the instance on the hypervisor. [ 713.745232] env[68279]: DEBUG nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.746450] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2593bb-9c07-4606-a9d3-9ceae04d984d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.793329] env[68279]: DEBUG nova.policy [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a7de63f07f24606ba4927a66d544923', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fd4e00dfba449c5800a22fc37f2c40b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.811126] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962718, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.814985] env[68279]: INFO nova.compute.manager [-] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Took 1.65 seconds to deallocate network for instance. [ 713.914424] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962717, 'name': ReconfigVM_Task, 'duration_secs': 0.812953} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.914698] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 61392426-52b8-437e-ab3d-122d9335cd36/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.915689] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414dae73-61c0-44ec-beeb-02e53586058e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.952336] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c72d947-3398-4ea5-9d2a-5288faa30747 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.972156] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 713.972156] env[68279]: value = "task-2962722" [ 713.972156] env[68279]: _type = "Task" [ 713.972156] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.981454] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.051112] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962721, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.214158] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.279573] env[68279]: INFO nova.compute.manager [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Took 41.57 seconds to build instance. 
[ 714.284716] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Successfully created port: 75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.319201] env[68279]: DEBUG oslo_vmware.api [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962718, 'name': PowerOnVM_Task, 'duration_secs': 0.889443} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.319201] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 714.319308] env[68279]: INFO nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Took 9.39 seconds to spawn the instance on the hypervisor. [ 714.319507] env[68279]: DEBUG nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 714.320720] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.321510] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7555cddc-9160-4cff-ae65-fcf96da48149 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.355131] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fa72d9-ae98-4811-8526-026438871fc5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.365018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1a5b39-b5cb-4354-bbc3-0efbaa8af980 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.398533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979a5360-49ad-4809-a6ad-02d524f20499 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.408948] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0791bf4-84d0-4974-b23f-d991c1a17c5d {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.424226] env[68279]: DEBUG nova.compute.provider_tree [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.484303] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962722, 'name': ReconfigVM_Task, 'duration_secs': 0.177242} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.486078] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.486078] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ea7d680-a792-4132-a76c-1ebf36e7ffc4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.492586] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 714.492586] env[68279]: value = "task-2962723" [ 714.492586] env[68279]: _type = "Task" [ 714.492586] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.501495] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.549997] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962721, 'name': CreateVM_Task, 'duration_secs': 0.559964} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.549997] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 714.550669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.550844] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.551199] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 714.551491] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3ccb7d-ee2a-4f06-8827-5e3378569099 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.557354] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 714.557354] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523624a3-740f-05ed-a8df-236ce6c15c28" [ 714.557354] env[68279]: _type = "Task" [ 714.557354] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.567310] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523624a3-740f-05ed-a8df-236ce6c15c28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.572848] env[68279]: DEBUG nova.network.neutron [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updated VIF entry in instance network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.573221] env[68279]: DEBUG nova.network.neutron [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [{"id": "ba399180-a9aa-4428-8f5a-2ca45969e646", "address": "fa:16:3e:81:ed:d2", "network": {"id": "5b08982e-5c8f-43fe-9c87-01afa1634a99", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1752329142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "144a0c8fec0d452fa465a921e9128d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba399180-a9", "ovs_interfaceid": "ba399180-a9aa-4428-8f5a-2ca45969e646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.782446] env[68279]: DEBUG oslo_concurrency.lockutils [None req-38e66807-be58-4910-b7fa-7f179119b922 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.823s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.844788] env[68279]: INFO nova.compute.manager [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Took 38.53 seconds to build instance. [ 714.927771] env[68279]: DEBUG nova.scheduler.client.report [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.004333] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962723, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.069962] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523624a3-740f-05ed-a8df-236ce6c15c28, 'name': SearchDatastore_Task, 'duration_secs': 0.013483} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.070513] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.071521] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.071950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.072169] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.072318] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.072587] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb51ab12-f2f7-4bc1-8c4f-01caed06d06b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.076443] env[68279]: DEBUG oslo_concurrency.lockutils [req-34082be9-ab98-4007-813c-3a569ece2d01 req-41494d3c-b98d-4167-bcf4-ceb5531912ea service nova] Releasing lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.086830] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 
tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.087035] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.087850] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-134eecf7-81b8-4528-b487-3c5905ec17a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.095308] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 715.095308] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52867db0-d9da-123f-0e93-768ab86fcb98" [ 715.095308] env[68279]: _type = "Task" [ 715.095308] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.107494] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52867db0-d9da-123f-0e93-768ab86fcb98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.222728] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.255018] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed 
tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.255885] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.256264] env[68279]: DEBUG nova.virt.hardware [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.257380] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42344cc7-3d68-4d80-9f03-fc2d5756cf58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.268194] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3b51a5-d69d-47cc-9d4a-3785b8140641 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.284601] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.348018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-796858c4-3207-40cb-b6fb-2552b6c79c1c tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.314s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.435798] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.437280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.104s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.438833] env[68279]: INFO nova.compute.claims [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.461415] env[68279]: INFO nova.scheduler.client.report [None 
req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Deleted allocations for instance 11c439ab-e27c-43e6-b752-c90af5f84bc1 [ 715.504178] env[68279]: DEBUG oslo_vmware.api [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962723, 'name': PowerOnVM_Task, 'duration_secs': 0.618323} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.504474] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.507494] env[68279]: DEBUG nova.compute.manager [None req-a4b6d1fd-e694-4877-8dba-5c1e7985ebe4 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 715.508297] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad378da-1f99-46a4-88b8-fe1ca8967cc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.615640] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52867db0-d9da-123f-0e93-768ab86fcb98, 'name': SearchDatastore_Task, 'duration_secs': 0.039919} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.616659] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a58583ce-201c-4e8d-ab4d-34610ff34f12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.625092] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 715.625092] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52543b72-1d15-3fe9-ead4-da2330aa0c89" [ 715.625092] env[68279]: _type = "Task" [ 715.625092] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.637606] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52543b72-1d15-3fe9-ead4-da2330aa0c89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.817933] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.851353] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.972202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f122a695-ed39-4819-91d6-f9023a2c5774 tempest-DeleteServersAdminTestJSON-1657103917 tempest-DeleteServersAdminTestJSON-1657103917-project-member] Lock "11c439ab-e27c-43e6-b752-c90af5f84bc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.712s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.142090] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52543b72-1d15-3fe9-ead4-da2330aa0c89, 'name': SearchDatastore_Task, 'duration_secs': 0.01503} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.142171] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.142379] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1d16a5c5-981b-474e-8159-820ac6fcc42d/1d16a5c5-981b-474e-8159-820ac6fcc42d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.142667] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d97e075-628d-4423-b61d-172797dbd896 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.152529] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 716.152529] env[68279]: value = "task-2962724" [ 716.152529] env[68279]: _type = "Task" [ 716.152529] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.164808] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962724, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.186334] env[68279]: DEBUG nova.compute.manager [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.186572] env[68279]: DEBUG nova.compute.manager [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing instance network info cache due to event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 716.186769] env[68279]: DEBUG oslo_concurrency.lockutils [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.186917] env[68279]: DEBUG oslo_concurrency.lockutils [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.187081] env[68279]: DEBUG nova.network.neutron [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.377839] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.439111] env[68279]: DEBUG nova.compute.manager [req-f2e7d04d-e865-4761-97d5-4e80748c0261 req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Received event network-vif-plugged-75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.439111] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2e7d04d-e865-4761-97d5-4e80748c0261 req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] Acquiring lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.439111] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2e7d04d-e865-4761-97d5-4e80748c0261 
req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.439111] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2e7d04d-e865-4761-97d5-4e80748c0261 req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.440751] env[68279]: DEBUG nova.compute.manager [req-f2e7d04d-e865-4761-97d5-4e80748c0261 req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] No waiting events found dispatching network-vif-plugged-75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.440751] env[68279]: WARNING nova.compute.manager [req-f2e7d04d-e865-4761-97d5-4e80748c0261 req-be770ac6-5814-48bd-992a-1b9e05eb9ed2 service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Received unexpected event network-vif-plugged-75ef3733-1b15-4793-9073-f9964cbea45d for instance with vm_state building and task_state spawning. [ 716.547028] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Successfully updated port: 75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.664961] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962724, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.057286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.057442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.057610] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.092408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c8cc3a-25b8-4493-a889-4936796d57ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.101937] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2b6488-4ac7-4f87-82c2-f42a778d3ade {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.146648] env[68279]: DEBUG nova.network.neutron [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updated VIF entry in instance network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.147062] env[68279]: DEBUG nova.network.neutron [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.148877] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b6fdd1-1b93-48b7-a6d0-f4d25dd3591d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.162095] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9782564a-ae75-41cd-8e2b-887e795327db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.171304] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.867314} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.172034] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1d16a5c5-981b-474e-8159-820ac6fcc42d/1d16a5c5-981b-474e-8159-820ac6fcc42d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.172335] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.172869] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c5237d0-e6e4-4a6b-8e15-5579618ef6b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.187688] env[68279]: DEBUG nova.compute.provider_tree [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.195996] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 717.195996] env[68279]: value = "task-2962725" [ 717.195996] env[68279]: _type = "Task" [ 717.195996] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.207191] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962725, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.612110] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.653536] env[68279]: DEBUG oslo_concurrency.lockutils [req-f1873aa6-80bc-4b87-a8cb-2d067ac87d2e req-09c7c3d8-42da-4c4f-ba63-c871795adfd6 service nova] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.691885] env[68279]: DEBUG nova.scheduler.client.report [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.712338] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962725, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091436} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.712511] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.713815] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0897a65-9de7-44ce-a57e-8ebe2cc0ac31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.741707] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 1d16a5c5-981b-474e-8159-820ac6fcc42d/1d16a5c5-981b-474e-8159-820ac6fcc42d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.742489] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8b71f34-43b3-4b72-9998-637181daa295 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.767901] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 717.767901] env[68279]: value = "task-2962726" [ 717.767901] env[68279]: _type = "Task" [ 717.767901] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.777039] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.875083] env[68279]: DEBUG nova.network.neutron [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Updating instance_info_cache with network_info: [{"id": "75ef3733-1b15-4793-9073-f9964cbea45d", "address": "fa:16:3e:69:6a:e6", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ef3733-1b", "ovs_interfaceid": "75ef3733-1b15-4793-9073-f9964cbea45d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.905899] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "fe92e176-222c-4c46-a254-1c12e21c68d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.906227] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.204129] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.204754] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 
tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.211031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.394s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.280212] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962726, 'name': ReconfigVM_Task, 'duration_secs': 0.415006} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.280527] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 1d16a5c5-981b-474e-8159-820ac6fcc42d/1d16a5c5-981b-474e-8159-820ac6fcc42d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.281186] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3487944f-3e17-476c-bd48-c8bd129e7d07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.291224] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 718.291224] env[68279]: value = "task-2962727" [ 718.291224] env[68279]: _type = "Task" [ 718.291224] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.306623] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962727, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.312064] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.312064] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Instance network_info: |[{"id": "75ef3733-1b15-4793-9073-f9964cbea45d", "address": "fa:16:3e:69:6a:e6", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ef3733-1b", "ovs_interfaceid": "75ef3733-1b15-4793-9073-f9964cbea45d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 718.312227] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:6a:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75ef3733-1b15-4793-9073-f9964cbea45d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.322300] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating folder: Project (9fd4e00dfba449c5800a22fc37f2c40b). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.322838] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1ac5fa2-85a5-499f-a9de-c1cf01577667 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.337491] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created folder: Project (9fd4e00dfba449c5800a22fc37f2c40b) in parent group-v594445. 
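The "Invoking <Object>.<Method> with opID=oslo.vmware-..." lines above, together with the "Waiting for the task: (returnval){ ... }" and "progress is N%" lines, come from oslo.vmware's session layer: the driver issues each vSphere call through invoke_api() and, for *_Task methods, blocks in wait_for_task() while a poll loop reports progress. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials and folder names below are placeholders rather than values from this deployment, and the sketch illustrates the oslo.vmware public API, not the Nova driver code itself:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; in Nova these come from nova.conf [vmware].
    session = api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    content = session.vim.service_content

    # Synchronous call: CreateFolder returns the new folder reference directly.
    folder = session.invoke_api(session.vim, 'CreateFolder',
                                content.rootFolder, name='Instances')

    # Asynchronous call: *_Task methods return a task reference, which
    # wait_for_task() polls until completion while logging progress
    # (the "Waiting for the task" / "progress is N%" lines above).
    task = session.invoke_api(session.vim, 'Rename_Task', folder,
                              newName='Instances-renamed')
    task_info = session.wait_for_task(task)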
[ 718.337724] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating folder: Instances. Parent ref: group-v594545. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.337971] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37f2fda3-b8e5-40e8-818a-c94afa854a6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.353450] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created folder: Instances in parent group-v594545. [ 718.353701] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.353897] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.354129] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb0db681-83f5-4df7-9b26-383dad420588 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.378102] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.378102] env[68279]: value = "task-2962730" [ 718.378102] env[68279]: _type = "Task" [ 718.378102] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.387589] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962730, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.704444] env[68279]: DEBUG nova.compute.manager [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.704712] env[68279]: DEBUG nova.compute.manager [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing instance network info cache due to event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 718.704989] env[68279]: DEBUG oslo_concurrency.lockutils [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.704989] env[68279]: DEBUG oslo_concurrency.lockutils [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.705213] env[68279]: DEBUG nova.network.neutron [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.712144] env[68279]: DEBUG nova.compute.utils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.724311] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.726519] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.797529] env[68279]: DEBUG nova.policy [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655eae57bb1349c0a229c3b57f4d3446', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f947b60992d543c4b0bfee2553bfe357', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.805199] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962727, 'name': Rename_Task, 'duration_secs': 0.204761} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.805288] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.805476] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10d13fa2-8422-4359-9892-e0ffc1c98f56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.813279] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 718.813279] env[68279]: value = "task-2962731" [ 718.813279] env[68279]: _type = "Task" [ 718.813279] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.822296] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.891677] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962730, 'name': CreateVM_Task, 'duration_secs': 0.513489} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.892737] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.896104] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.896104] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.896104] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 718.896104] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fa34161-37c6-4037-bf2f-f37d27f295a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.901190] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 718.901190] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52071f89-7ec1-f609-3d5e-8576074a5761" [ 718.901190] env[68279]: _type = "Task" [ 718.901190] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.915037] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52071f89-7ec1-f609-3d5e-8576074a5761, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.040968] env[68279]: DEBUG nova.compute.manager [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Received event network-changed-75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.040968] env[68279]: DEBUG nova.compute.manager [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Refreshing instance network info cache due to event network-changed-75ef3733-1b15-4793-9073-f9964cbea45d. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 719.041131] env[68279]: DEBUG oslo_concurrency.lockutils [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] Acquiring lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.041282] env[68279]: DEBUG oslo_concurrency.lockutils [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] Acquired lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.041473] env[68279]: DEBUG nova.network.neutron [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Refreshing network info cache for port 75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.115915] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Successfully created port: c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.225041] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.260633] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 50e08259-7915-49bb-b137-5cc6e9d53c16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.260633] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.260633] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6b778e98-12c2-42a5-a772-06ea32d090b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.260633] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.260633] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.260983] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 010e5bfc-814c-4bde-8a16-7c2009ee13b6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 719.260983] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 336b7399-b64e-411f-99bc-ba0d292e371a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.261110] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.261286] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 92786813-f4ab-4ff7-8597-aa1aa90eeb01 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 719.261430] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.261570] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance eccc5882-2c8b-456d-bbd2-d9ed22777a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.263485] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance bf4e6484-d17d-4244-9163-1ef0012874b8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
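The resource-tracker lines above and below show _remove_deleted_instances_allocations reconciling placement allocations against this host's view of its instances: instances it actively manages keep their allocations, instances it knows are deleted have their allocations removed (as in the "Deleted allocations for instance ..." line earlier), and consumers it cannot account for are only warned about and skipped. A rough, self-contained illustration of that decision, using made-up data shaped like the log entries (this is not the actual Nova implementation):

    def reconcile_allocations(allocations, tracked, deleted):
        """Return consumer UUIDs whose allocations should be removed."""
        to_remove = []
        for consumer_uuid, alloc in allocations.items():
            if consumer_uuid in tracked:
                print(f"Instance {consumer_uuid} actively managed; "
                      f"keeping allocations {alloc['resources']}")
            elif consumer_uuid in deleted:
                to_remove.append(consumer_uuid)  # safe to delete from placement
            else:
                print(f"Instance {consumer_uuid} not managed here but has "
                      "allocations; skipping heal")
        return to_remove

    # Example shaped like the entries above.
    allocs = {
        '50e08259-7915-49bb-b137-5cc6e9d53c16':
            {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}},
        '010e5bfc-814c-4bde-8a16-7c2009ee13b6':
            {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}},
    }
    print(reconcile_allocations(
        allocs,
        tracked={'50e08259-7915-49bb-b137-5cc6e9d53c16'},
        deleted=set()))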
[ 719.263663] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 95f0aeaa-75ab-4fd9-b28d-e43703429167 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.263833] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 719.264423] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 61392426-52b8-437e-ab3d-122d9335cd36 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264423] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264423] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264423] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f7db383a-648a-4984-ae25-72bc2ccfe369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264666] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f927c34a-f155-4a1f-8151-b16a3cb3e9a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264666] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1d16a5c5-981b-474e-8159-820ac6fcc42d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.264666] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.296557] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.296856] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.324415] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962731, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.328863] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "ed86ef15-1941-40c5-8178-344a7b401b58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.329129] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.413424] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52071f89-7ec1-f609-3d5e-8576074a5761, 'name': SearchDatastore_Task, 'duration_secs': 0.016908} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.413515] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.414645] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.414645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.414645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.414645] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.414645] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9827be9f-5e48-4ffa-89e2-be503e17f6d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.431201] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.431395] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 719.432148] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c979d59-b15c-4ded-be3c-737f043222f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.439443] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 719.439443] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527e9779-1ebf-28b5-4f4a-36ab5d4fa307" [ 719.439443] env[68279]: _type = "Task" [ 719.439443] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.448185] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527e9779-1ebf-28b5-4f4a-36ab5d4fa307, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.542287] env[68279]: DEBUG nova.network.neutron [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updated VIF entry in instance network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.542287] env[68279]: DEBUG nova.network.neutron [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.771970] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b2e272b3-520a-4ef7-8141-a9d55739d6b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.772272] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance d8eca7ac-744e-469c-9a87-901f0641f4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 719.775031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.775031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.775031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.775197] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.775385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.778774] env[68279]: INFO nova.compute.manager [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Terminating instance [ 719.824508] env[68279]: DEBUG oslo_vmware.api [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962731, 'name': PowerOnVM_Task, 'duration_secs': 0.521732} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.824508] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.824690] env[68279]: INFO nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Took 9.55 seconds to spawn the instance on the hypervisor. [ 719.824980] env[68279]: DEBUG nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.825759] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d4a713-9972-4ed5-834e-c327f2ff0338 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.871329] env[68279]: DEBUG nova.network.neutron [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Updated VIF entry in instance network info cache for port 75ef3733-1b15-4793-9073-f9964cbea45d. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.874576] env[68279]: DEBUG nova.network.neutron [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Updating instance_info_cache with network_info: [{"id": "75ef3733-1b15-4793-9073-f9964cbea45d", "address": "fa:16:3e:69:6a:e6", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ef3733-1b", "ovs_interfaceid": "75ef3733-1b15-4793-9073-f9964cbea45d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.954986] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': 
session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527e9779-1ebf-28b5-4f4a-36ab5d4fa307, 'name': SearchDatastore_Task, 'duration_secs': 0.034397} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.956552] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8f96ddf-07ab-44d7-8c75-49fa19760f22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.963467] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 719.963467] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e5707-0c73-783d-1f5a-7cd12a6a2683" [ 719.963467] env[68279]: _type = "Task" [ 719.963467] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.971802] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e5707-0c73-783d-1f5a-7cd12a6a2683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.044557] env[68279]: DEBUG oslo_concurrency.lockutils [req-2be97a47-8e20-4b86-96a4-84d9aa2fefbb req-ef7e8e29-b2cb-47ec-bace-30246e790e89 service nova] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.237993] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.263540] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.263784] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.263940] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.264139] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.264289] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.264435] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.264641] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.264799] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.264963] env[68279]: DEBUG 
nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.265215] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.265400] env[68279]: DEBUG nova.virt.hardware [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.266277] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dfdfaf-45de-4f2c-98c4-4fac93e990e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.275411] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e5565f0d-ed60-4ac8-bba1-ab46b337dd90 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 720.277412] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3137b6a4-8c4c-465b-8032-5831c2c7640b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.282895] env[68279]: DEBUG nova.compute.manager [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 720.283123] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.283919] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fea2ae-783a-4474-be29-de44ad00f268 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.301684] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.303643] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-952911cd-6ad9-4712-a8db-3779e9e0a705 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.309340] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 720.309340] env[68279]: value = "task-2962732" [ 720.309340] env[68279]: _type = "Task" [ 720.309340] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.318920] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.343473] env[68279]: INFO nova.compute.manager [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Took 41.71 seconds to build instance. [ 720.374093] env[68279]: DEBUG oslo_concurrency.lockutils [req-f7c90222-19ea-4130-b2f4-9aa36fc51d46 req-f9a4fef4-ff08-4f5f-b941-c5378e9e603c service nova] Releasing lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.475380] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e5707-0c73-783d-1f5a-7cd12a6a2683, 'name': SearchDatastore_Task, 'duration_secs': 0.033134} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.475855] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.476134] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.476403] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd3b2541-e5f3-43a6-a56f-8f4ae4e141d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.484579] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 720.484579] env[68279]: value = "task-2962733" [ 720.484579] env[68279]: _type = "Task" [ 720.484579] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.493808] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962733, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.658666] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Successfully updated port: c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 720.784165] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 4e157792-f910-492c-ab29-dd3f86cb96a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 720.821662] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962732, 'name': PowerOffVM_Task, 'duration_secs': 0.289851} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.822063] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.822188] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 720.822404] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-431e287a-29d5-4164-9cb3-3875b9546863 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.848562] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6421b66b-fa20-4533-822e-39158af9cbc3 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.763s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.900743] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 720.900976] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 720.901398] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Deleting the datastore file [datastore2] f927c34a-f155-4a1f-8151-b16a3cb3e9a1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.901533] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7bd9fcbc-b6df-41c7-adb0-cc3d9e9c54b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.912069] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for the task: (returnval){ [ 720.912069] env[68279]: value = "task-2962735" [ 720.912069] env[68279]: _type = "Task" [ 720.912069] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.924090] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962735, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.996643] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962733, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.162012] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.162196] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.162343] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.287467] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.353092] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 721.425590] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962735, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.496141] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533404} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.496431] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.496651] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.496900] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d134cda4-88a5-404b-a4db-b002143c75f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.504884] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 721.504884] env[68279]: value = "task-2962736" [ 721.504884] env[68279]: _type = "Task" [ 721.504884] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.513374] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962736, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.698667] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.790811] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.842484] env[68279]: DEBUG nova.network.neutron [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Updating instance_info_cache with network_info: [{"id": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "address": "fa:16:3e:af:24:bf", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc349d19b-7d", "ovs_interfaceid": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.884365] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.925452] env[68279]: DEBUG oslo_vmware.api [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Task: {'id': task-2962735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.631009} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.925824] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.926583] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.926583] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.926583] env[68279]: INFO nova.compute.manager [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Took 1.64 seconds to destroy the instance on the hypervisor. [ 721.927200] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.927416] env[68279]: DEBUG nova.compute.manager [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.927518] env[68279]: DEBUG nova.network.neutron [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.015703] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072313} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.016041] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.016998] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4265acf1-ec0f-444f-8e58-d1b310a4a6cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.042885] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.043252] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b38be89c-b708-4588-924a-64ed98464c7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.065414] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 722.065414] env[68279]: value = "task-2962737" [ 722.065414] env[68279]: _type = "Task" [ 722.065414] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.075328] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962737, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.114745] env[68279]: DEBUG nova.compute.manager [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.114938] env[68279]: DEBUG nova.compute.manager [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing instance network info cache due to event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 722.115246] env[68279]: DEBUG oslo_concurrency.lockutils [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.115410] env[68279]: DEBUG oslo_concurrency.lockutils [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.115630] env[68279]: DEBUG nova.network.neutron [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 722.195935] env[68279]: DEBUG nova.compute.manager [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Received event network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.196089] env[68279]: DEBUG oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Acquiring lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.196274] env[68279]: DEBUG oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.196492] env[68279]: DEBUG oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.196659] env[68279]: DEBUG nova.compute.manager [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] No waiting events found dispatching network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 722.196817] env[68279]: WARNING nova.compute.manager [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Received unexpected event network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0 for instance with vm_state building and task_state spawning. 
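The WARNING in the record above shows the pattern behind Nova's external-event handling: the compute manager receives network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0 from the service user, looks for a registered waiter for that (instance, event) pair, finds none ("No waiting events found dispatching"), and therefore logs the event as unexpected while the instance is still in vm_state building / task_state spawning. The following is only a minimal sketch of that dispatch decision under assumed data structures; the waiter table, helper name, and use of a Future are illustrative and are not Nova's actual internals.

# Sketch, assuming waiters are stored per instance UUID keyed by event name.
from concurrent.futures import Future


def pop_instance_event(waiting_events, instance_uuid, event_key):
    """Return and wake the waiter registered for (instance, event), if any."""
    waiter = waiting_events.get(instance_uuid, {}).pop(event_key, None)
    if waiter is None:
        # Nothing was waiting: mirror the "unexpected event" WARNING above.
        print(f"WARNING: unexpected event {event_key} for instance {instance_uuid}")
        return None
    waiter.set_result(event_key)  # wake the code path that was waiting on the plug
    return waiter


# Unmatched case, as in the log: no waiter registered yet for the plugged port.
waiting = {}
pop_instance_event(
    waiting,
    "d8eca7ac-744e-469c-9a87-901f0641f4f2",
    "network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0",
)

# Matched case for contrast: a waiter registered before the event arrives is woken.
waiting = {
    "d8eca7ac-744e-469c-9a87-901f0641f4f2": {
        "network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0": Future()
    }
}
pop_instance_event(
    waiting,
    "d8eca7ac-744e-469c-9a87-901f0641f4f2",
    "network-vif-plugged-c349d19b-7d87-4e7b-b600-7943303ac6e0",
)

In the records that follow, the companion network-changed event for the same port takes the other path: it simply triggers a refresh of the instance network info cache rather than waking a waiter.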
[ 722.197263] env[68279]: DEBUG nova.compute.manager [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Received event network-changed-c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.197263] env[68279]: DEBUG nova.compute.manager [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Refreshing instance network info cache due to event network-changed-c349d19b-7d87-4e7b-b600-7943303ac6e0. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 722.197548] env[68279]: DEBUG oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Acquiring lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.293942] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 05b94aa5-3efc-4790-9d98-c2658b8e8b4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 722.347859] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.348144] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance network_info: |[{"id": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "address": "fa:16:3e:af:24:bf", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc349d19b-7d", "ovs_interfaceid": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 722.348477] env[68279]: DEBUG 
oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Acquired lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.348663] env[68279]: DEBUG nova.network.neutron [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Refreshing network info cache for port c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 722.350083] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:24:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c349d19b-7d87-4e7b-b600-7943303ac6e0', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.358353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating folder: Project (f947b60992d543c4b0bfee2553bfe357). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.359138] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f123596c-77e5-45b9-b505-f593b0daab83 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.372151] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created folder: Project (f947b60992d543c4b0bfee2553bfe357) in parent group-v594445. [ 722.372151] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating folder: Instances. Parent ref: group-v594548. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.372310] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39ace456-95f7-4114-97eb-64a544797df9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.384040] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created folder: Instances in parent group-v594548. [ 722.384040] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 722.384040] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 722.384040] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-702bb631-62dc-4d15-89a0-e7fb8931fc29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.405038] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.405038] env[68279]: value = "task-2962740" [ 722.405038] env[68279]: _type = "Task" [ 722.405038] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.413276] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962740, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.583235] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962737, 'name': ReconfigVM_Task, 'duration_secs': 0.504605} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.583529] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.584185] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c73a23a-ac3b-4981-bb12-766d6c9bbecc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.597046] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 722.597046] env[68279]: value = "task-2962741" [ 722.597046] env[68279]: _type = "Task" [ 722.597046] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.613073] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962741, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.678606] env[68279]: DEBUG nova.network.neutron [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.796847] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance d452e3d2-1590-4352-8406-31d85b2921f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 722.848939] env[68279]: DEBUG nova.network.neutron [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updated VIF entry in instance network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 722.849380] env[68279]: DEBUG nova.network.neutron [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.919080] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962740, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.110255] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962741, 'name': Rename_Task, 'duration_secs': 0.25578} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.110556] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.110840] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1f737fd-92ab-4584-9678-6d557aed5c40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.119030] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 723.119030] env[68279]: value = "task-2962742" [ 723.119030] env[68279]: _type = "Task" [ 723.119030] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.127937] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.159834] env[68279]: DEBUG nova.network.neutron [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Updated VIF entry in instance network info cache for port c349d19b-7d87-4e7b-b600-7943303ac6e0. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.160308] env[68279]: DEBUG nova.network.neutron [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Updating instance_info_cache with network_info: [{"id": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "address": "fa:16:3e:af:24:bf", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc349d19b-7d", "ovs_interfaceid": "c349d19b-7d87-4e7b-b600-7943303ac6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.181325] env[68279]: INFO nova.compute.manager [-] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Took 1.25 seconds to deallocate network for instance. [ 723.300297] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 01a624d3-782d-44cf-8a4e-05a85ac91c64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 723.352819] env[68279]: DEBUG oslo_concurrency.lockutils [req-3e0e811a-eab5-405a-80f1-5d9a9d2b5033 req-5e514a1b-1a82-451e-903d-efd25178d521 service nova] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.419376] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962740, 'name': CreateVM_Task, 'duration_secs': 0.526745} completed successfully. 
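The instance_info_cache updates above embed a full network_info document per VIF: port id, MAC, subnets with fixed and floating IPs, MTU, and the OVS binding details. A small sketch, assuming only the JSON layout visible in the log, that reduces such an entry to the fields usually needed when reading these records (illustrative only, not Nova code):

    import json

    def summarize_network_info(network_info: list) -> list:
        """Reduce a Nova-style network_info list (as logged above) to the essentials."""
        summary = []
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            summary.append({
                "port_id": vif["id"],
                "mac": vif["address"],
                "bridge": vif["network"]["bridge"],
                "mtu": vif["network"]["meta"].get("mtu"),
                "fixed_ips": fixed,
                "floating_ips": floating,
                "devname": vif.get("devname"),
            })
        return summary

    # Example using the port from the record above, trimmed to the fields read here:
    vif_entry = {
        "id": "c349d19b-7d87-4e7b-b600-7943303ac6e0",
        "address": "fa:16:3e:af:24:bf",
        "devname": "tapc349d19b-7d",
        "network": {
            "bridge": "br-int",
            "meta": {"mtu": 8950},
            "subnets": [{"ips": [{"address": "192.168.128.9", "floating_ips": []}]}],
        },
    }
    print(json.dumps(summarize_network_info([vif_entry]), indent=2))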
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.419569] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 723.420296] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.420463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.421710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 723.421986] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7714572-82cc-487e-a9d5-0d78b340d656 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.427582] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 723.427582] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bade7f-28ce-3f21-2e97-f39574c4d28f" [ 723.427582] env[68279]: _type = "Task" [ 723.427582] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.436814] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bade7f-28ce-3f21-2e97-f39574c4d28f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.634417] env[68279]: DEBUG oslo_vmware.api [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962742, 'name': PowerOnVM_Task, 'duration_secs': 0.491439} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.634810] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.634810] env[68279]: INFO nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Took 8.41 seconds to spawn the instance on the hypervisor. [ 723.634903] env[68279]: DEBUG nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.635777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bba1cc-8799-4302-9305-fa8470d08f2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.662713] env[68279]: DEBUG oslo_concurrency.lockutils [req-50b364b7-d40d-4eb0-9c30-29dac5917456 req-619cfa80-7c43-4338-9c3b-ad686656f375 service nova] Releasing lock "refresh_cache-d8eca7ac-744e-469c-9a87-901f0641f4f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.688172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.803900] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7858163d-8e68-4565-b1e0-ecd2e9be350d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 723.942022] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bade7f-28ce-3f21-2e97-f39574c4d28f, 'name': SearchDatastore_Task, 'duration_secs': 0.010406} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.942361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.942595] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.942831] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.942972] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.943154] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.943427] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98814904-e25b-4a6d-8216-274b2ead9985 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.957149] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.957149] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore1] devstack-image-cache_base created. 
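The SearchDatastore_Task and image-cache lock sequence above is the "fetch image if missing" check: take a lock keyed on the cached VMDK path, look for it on the datastore, create the cache directory if needed, and only copy the image when it is absent. A rough local-filesystem analogue of that shape, with download_image as a hypothetical callable standing in for the datastore copy:

    import threading
    from pathlib import Path
    from typing import Callable

    _cache_locks: dict = {}
    _registry_lock = threading.Lock()

    def _lock_for(key: str) -> threading.Lock:
        # One lock per cached image path, matching the per-path locks in the log.
        with _registry_lock:
            return _cache_locks.setdefault(key, threading.Lock())

    def fetch_image_if_missing(cache_dir: Path, image_id: str,
                               download_image: Callable[[str, Path], None]) -> Path:
        """Return the cached image path, downloading it only if it is absent."""
        target = cache_dir / image_id / f"{image_id}.vmdk"
        with _lock_for(str(target)):
            if target.exists():                               # the SearchDatastore_Task check
                return target
            target.parent.mkdir(parents=True, exist_ok=True)  # the MakeDirectory step
            download_image(image_id, target)
        return target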
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.957685] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba4ce64-06d8-47eb-aef1-3897bb4fb961 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.964276] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 723.964276] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fa328-93d1-4faf-2434-e21c9700bd1e" [ 723.964276] env[68279]: _type = "Task" [ 723.964276] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.973607] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fa328-93d1-4faf-2434-e21c9700bd1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.157344] env[68279]: INFO nova.compute.manager [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Took 40.61 seconds to build instance. [ 724.307788] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 43f629d6-bdc3-4345-97ec-26ce2c9d7be7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 724.475906] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fa328-93d1-4faf-2434-e21c9700bd1e, 'name': SearchDatastore_Task, 'duration_secs': 0.037108} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.476617] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38916b9a-e172-4289-8067-eaeb9ecc2383 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.482437] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 724.482437] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dddb55-b28a-58e1-81fd-91084e77ebfe" [ 724.482437] env[68279]: _type = "Task" [ 724.482437] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.491085] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dddb55-b28a-58e1-81fd-91084e77ebfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.658460] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e493c5b5-15bd-48c5-98a9-6cee91a150ed tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.622s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.810406] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 724.916923] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.917131] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing instance network info cache due to event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4. 
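The lockutils lines consistently report two durations: how long the caller waited to acquire a lock and how long it was held (for example, the build lock on 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03 reported above as held for 66.622s). A simplified context-manager sketch that produces the same style of accounting; it imitates the log format only and is not oslo.concurrency:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str, by: str):
        """Acquire a lock and report waited/held times in the style of the log."""
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

    # Usage: with timed_lock(lk, "compute_resources", "ResourceTracker.update_usage"): ...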
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.917310] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.917455] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.917672] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.933098] env[68279]: DEBUG nova.compute.manager [req-140e00c8-4d07-4694-95d7-28373bd437d5 req-45e4e653-bff2-4399-b151-ed8b9c76e372 service nova] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Received event network-vif-deleted-8023cef8-1786-40b1-a2f3-0692fb595915 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.994633] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dddb55-b28a-58e1-81fd-91084e77ebfe, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.994904] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.995194] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.995445] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-877cd51b-99f5-4c36-868a-dd6be51d6ff0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.003087] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 725.003087] env[68279]: value = "task-2962743" [ 725.003087] env[68279]: _type = "Task" [ 725.003087] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.011753] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.161765] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 725.314757] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 665d932d-1068-4bb2-835c-2184a80753d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 725.515729] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471741} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.515729] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 725.515729] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.516389] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a387d74-8aab-47ec-9b95-fd8c19f6ecdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.523920] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 725.523920] env[68279]: value = "task-2962744" [ 725.523920] env[68279]: _type = "Task" [ 725.523920] env[68279]: } to complete. 
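The ExtendVirtualDisk_Task above grows the freshly copied root disk to 1048576; that argument is a capacity in KiB (vCenter's ExtendVirtualDisk call takes a KB value), so it corresponds to the flavor's 1 GB root disk and to the DISK_GB: 1 allocations seen in the resource-tracker records. A quick unit check:

    KIB_PER_GIB = 1024 * 1024   # 1 GiB expressed in KiB

    def root_disk_capacity_kib(root_gb: int) -> int:
        """Flavor root_gb -> the KiB capacity value seen in the extend call above."""
        return root_gb * KIB_PER_GIB

    assert root_disk_capacity_kib(1) == 1048576   # the value logged above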
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.534615] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.637099] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updated VIF entry in instance network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.637234] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.686185] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.699850] env[68279]: INFO nova.compute.manager [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Rescuing [ 725.700116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.700283] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 
tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.700453] env[68279]: DEBUG nova.network.neutron [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.818664] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance fe92e176-222c-4c46-a254-1c12e21c68d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 725.824032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.824563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.033741] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059813} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.034000] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.034815] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cb1a67-adac-4f26-9f81-35493f606755 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.058561] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.058868] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-013e6aeb-37b5-4045-b5dc-291cb86999de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.080965] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 726.080965] env[68279]: value = "task-2962745" [ 726.080965] env[68279]: _type = "Task" [ 726.080965] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.090250] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962745, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.140560] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.140560] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.140748] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing instance network info cache due to event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 726.140881] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquiring lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.141045] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquired lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.141219] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.322266] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance ed86ef15-1941-40c5-8178-344a7b401b58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 726.404764] env[68279]: DEBUG nova.network.neutron [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Updating instance_info_cache with network_info: [{"id": "75ef3733-1b15-4793-9073-f9964cbea45d", "address": "fa:16:3e:69:6a:e6", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ef3733-1b", "ovs_interfaceid": "75ef3733-1b15-4793-9073-f9964cbea45d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.448540] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "61392426-52b8-437e-ab3d-122d9335cd36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.448941] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.449032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "61392426-52b8-437e-ab3d-122d9335cd36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.449222] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.449389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.451705] env[68279]: INFO nova.compute.manager [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Terminating instance [ 726.591421] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.825709] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance cfaee7e2-6929-4d8c-8614-e19e0055f2fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 726.825709] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 726.826063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 726.882176] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updated VIF entry in instance network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.882527] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [{"id": "ba399180-a9aa-4428-8f5a-2ca45969e646", "address": "fa:16:3e:81:ed:d2", "network": {"id": "5b08982e-5c8f-43fe-9c87-01afa1634a99", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1752329142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "144a0c8fec0d452fa465a921e9128d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba399180-a9", "ovs_interfaceid": "ba399180-a9aa-4428-8f5a-2ca45969e646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.908117] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "refresh_cache-015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.959064] env[68279]: DEBUG nova.compute.manager [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Start destroying the instance on the hypervisor. 
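The final resource view above can be cross-checked against the repeated "Skipping heal of allocation" records: each of those carries a flavor-sized allocation of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, and summing 18 of them reproduces the used_vcpus=18 and used_disk=18GB figures (the used_ram figure is computed separately by the tracker and does not reduce to this single multiplication). A tiny sketch of that summation:

    # Each "Skipping heal of allocation" record above carries this per-instance allocation.
    allocation = {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}

    def summed_usage(per_instance: dict, count: int) -> dict:
        """Sum identical per-instance allocations, as the resource tracker does."""
        return {rc: amount * count for rc, amount in per_instance.items()}

    print(summed_usage(allocation, 18))
    # -> {'DISK_GB': 18, 'MEMORY_MB': 3456, 'VCPU': 18}
    # DISK_GB and VCPU match the logged view (used_disk=18GB, used_vcpus=18);
    # the logged used_ram additionally reflects values the tracker adds on top.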
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 726.959064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.959064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd04982-38e3-4444-810a-c19afd52e9d7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.969055] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 726.969172] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be470b40-2fdb-4ac6-9e15-d1728abbecce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.978291] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 726.978291] env[68279]: value = "task-2962746" [ 726.978291] env[68279]: _type = "Task" [ 726.978291] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.992751] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.091165] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962745, 'name': ReconfigVM_Task, 'duration_secs': 0.734775} completed successfully. 
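The destroy path for instance 61392426-52b8-437e-ab3d-122d9335cd36 starts here and proceeds strictly in order: power the VM off, unregister it from vCenter, then delete its directory from the datastore (the unregister and file-deletion records follow a little further below). A bare-bones sketch of that ordering, with the three callables standing in for the corresponding vCenter calls:

    from typing import Callable

    def destroy_instance(power_off: Callable[[], None],
                         unregister: Callable[[], None],
                         delete_files: Callable[[], None]) -> None:
        """Mirror the teardown order in the log for a vmwareapi instance."""
        power_off()     # VirtualMachine.PowerOffVM_Task, polled until it completes
        unregister()    # VirtualMachine.UnregisterVM
        delete_files()  # FileManager.DeleteDatastoreFile_Task on the instance directory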
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.093860] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Reconfigured VM instance instance-00000022 to attach disk [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.094659] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-290f89c7-a44f-4f3b-a801-3586699cd31e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.102758] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 727.102758] env[68279]: value = "task-2962747" [ 727.102758] env[68279]: _type = "Task" [ 727.102758] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.114374] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962747, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.369976] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb9a970-74b5-4116-b6e1-d4c39192ba64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.380363] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4769d9-105e-4f58-b12a-c17b5e4b7d58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.386167] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Releasing lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.386167] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.386167] env[68279]: DEBUG nova.compute.manager [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing instance network info cache due to event network-changed-a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 727.387504] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquiring lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.387504] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Acquired lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.387504] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Refreshing network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.417144] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7339428b-54e0-4b70-8b56-8d6c903419c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.439647] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c07863-4ef9-4a60-a6bc-d31dfc23fa5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.460044] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.478539] env[68279]: DEBUG nova.compute.manager [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.478756] env[68279]: DEBUG nova.compute.manager [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing instance network info cache due to event network-changed-ba399180-a9aa-4428-8f5a-2ca45969e646. 
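The network-changed-… records show the compute manager handling Neutron external events: each event names a port, and the handler re-reads that port's info while holding the instance's refresh_cache lock, then rewrites the matching VIF entry in the cache. A condensed sketch of that flow, with query_port as a hypothetical stand-in for the Neutron lookup:

    import threading
    from collections import defaultdict
    from typing import Callable

    _net_cache: dict = {}                                   # instance_uuid -> network_info list
    _refresh_locks = defaultdict(threading.Lock)

    def handle_network_changed(instance_uuid: str, port_id: str,
                                query_port: Callable[[str], dict]) -> None:
        """Refresh one port's VIF entry in the instance network info cache."""
        with _refresh_locks[instance_uuid]:                 # "Acquired lock refresh_cache-<uuid>"
            fresh = query_port(port_id)                     # re-read the port's current state
            entries = _net_cache.setdefault(instance_uuid, [])
            for i, vif in enumerate(entries):
                if vif["id"] == port_id:
                    entries[i] = fresh                      # "Updated VIF entry ... for port <id>"
                    break
            else:
                entries.append(fresh)
        # leaving the with-block corresponds to "Releasing lock refresh_cache-<uuid>"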
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 727.478973] env[68279]: DEBUG oslo_concurrency.lockutils [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] Acquiring lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.479154] env[68279]: DEBUG oslo_concurrency.lockutils [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] Acquired lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.479289] env[68279]: DEBUG nova.network.neutron [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Refreshing network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.491934] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962746, 'name': PowerOffVM_Task, 'duration_secs': 0.404724} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.492340] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 727.492625] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.493321] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4b4ad50-3c8d-4013-9d71-dcc306c0f2cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.580797] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.581042] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.581198] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Deleting the datastore file [datastore1] 
61392426-52b8-437e-ab3d-122d9335cd36 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.582255] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c02a0d4-8dd2-4718-bdfb-6d25962d5a34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.591102] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for the task: (returnval){ [ 727.591102] env[68279]: value = "task-2962749" [ 727.591102] env[68279]: _type = "Task" [ 727.591102] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.599206] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.611897] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962747, 'name': Rename_Task, 'duration_secs': 0.194533} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.612300] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.612604] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b41105c-a247-4f28-80a9-9686bf847848 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.621050] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 727.621050] env[68279]: value = "task-2962750" [ 727.621050] env[68279]: _type = "Task" [ 727.621050] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.621947] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updated VIF entry in instance network info cache for port a981d4aa-4af9-4362-9690-4170835dd9b4. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.622432] env[68279]: DEBUG nova.network.neutron [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [{"id": "a981d4aa-4af9-4362-9690-4170835dd9b4", "address": "fa:16:3e:fc:78:e3", "network": {"id": "682d481c-26b3-40c1-a4df-f4dab0d5a10c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-492778916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "abc2a94d5ee444449c6c4d088263440a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa981d4aa-4a", "ovs_interfaceid": "a981d4aa-4af9-4362-9690-4170835dd9b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.634968] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962750, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.963261] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.101157] env[68279]: DEBUG oslo_vmware.api [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Task: {'id': task-2962749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350678} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.103687] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 728.103829] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 728.104452] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.104452] env[68279]: INFO nova.compute.manager [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Took 1.15 seconds to destroy the instance on the hypervisor. [ 728.104452] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.104622] env[68279]: DEBUG nova.compute.manager [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 728.104717] env[68279]: DEBUG nova.network.neutron [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.127811] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f911e41-f2b6-4e3e-b140-b9f7d0990dbb req-b82be3c2-d676-4e80-a613-09c9a04b11aa service nova] Releasing lock "refresh_cache-61392426-52b8-437e-ab3d-122d9335cd36" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.134111] env[68279]: DEBUG oslo_vmware.api [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962750, 'name': PowerOnVM_Task, 'duration_secs': 0.464823} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.136942] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.137277] env[68279]: INFO nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Took 7.90 seconds to spawn the instance on the hypervisor. [ 728.137562] env[68279]: DEBUG nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.138373] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61f055a-297f-4481-8f1e-1fd666e1817e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.208520] env[68279]: DEBUG nova.network.neutron [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updated VIF entry in instance network info cache for port ba399180-a9aa-4428-8f5a-2ca45969e646. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 728.208947] env[68279]: DEBUG nova.network.neutron [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [{"id": "ba399180-a9aa-4428-8f5a-2ca45969e646", "address": "fa:16:3e:81:ed:d2", "network": {"id": "5b08982e-5c8f-43fe-9c87-01afa1634a99", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1752329142-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "144a0c8fec0d452fa465a921e9128d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba399180-a9", "ovs_interfaceid": "ba399180-a9aa-4428-8f5a-2ca45969e646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.455188] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 
015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.455188] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-023c5c4b-5bfa-4d88-b8be-ecbffb8ebbed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.464436] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 728.464436] env[68279]: value = "task-2962751" [ 728.464436] env[68279]: _type = "Task" [ 728.464436] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.469675] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 728.469898] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.262s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.470176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.459s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.471750] env[68279]: INFO nova.compute.claims [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.481342] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962751, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.657745] env[68279]: INFO nova.compute.manager [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Took 42.37 seconds to build instance. 
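[editor's note] The "Acquiring lock ... / Lock ... acquired by ... waited Ns / ... released ... held Ns" messages in the entries above are emitted by oslo.concurrency's lockutils. As a rough illustration only (not Nova's actual code), the pattern that produces them looks like the sketch below: the function names are hypothetical, while the lock names ('compute_resources', 'refresh_cache-<uuid>') mirror the ones in this log.

    from oslo_concurrency import lockutils

    # Hypothetical function for illustration. The decorator serializes callers
    # on the named lock and logs the "acquired by ... waited Ns" /
    # "released ... held Ns" DEBUG lines seen above.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # resource accounting would run here while "compute_resources" is held
        pass

    # The same mechanism as a context manager, matching the per-instance
    # "refresh_cache-<uuid>" locks taken while the Neutron info cache is rebuilt.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # rebuild the instance network info cache under the lock
            pass
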
[ 728.711945] env[68279]: DEBUG oslo_concurrency.lockutils [req-a53d361e-4474-447b-a3e3-54bfbf792d32 req-6e66a253-5808-4899-99c0-21182a60340d service nova] Releasing lock "refresh_cache-1d16a5c5-981b-474e-8159-820ac6fcc42d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.894967] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.895260] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.895468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.895653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.895820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.898765] env[68279]: INFO nova.compute.manager [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Terminating instance [ 728.974803] env[68279]: DEBUG nova.network.neutron [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.988875] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': 
task-2962751, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.160417] env[68279]: DEBUG oslo_concurrency.lockutils [None req-798c35fd-b4cb-42a8-9876-4c26d8cf8149 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.298s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.404577] env[68279]: DEBUG nova.compute.manager [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.404577] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.406145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c0458e-e303-4b0c-adba-ac7935e25627 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.415308] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.415572] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-501db748-6724-40d2-9050-9ce3366ade20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.440145] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 729.440145] env[68279]: value = "task-2962752" [ 729.440145] env[68279]: _type = "Task" [ 729.440145] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.452073] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.477731] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962751, 'name': PowerOffVM_Task, 'duration_secs': 0.599598} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.477980] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.478817] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6692a8c-45c6-4929-a4ad-43d5d60a827a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.481564] env[68279]: INFO nova.compute.manager [-] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Took 1.38 seconds to deallocate network for instance. [ 729.504315] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09e6581-7b8a-4342-8a13-51ffb3776c11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.541131] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.541436] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9473fed2-6585-4ae2-90a0-dba4659f7a2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.554537] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 729.554537] env[68279]: value = "task-2962753" [ 729.554537] env[68279]: _type = "Task" [ 729.554537] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.570794] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 729.571112] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.571430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.571625] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.571889] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.572635] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00bf898c-f668-44b7-ba18-e1125a5f81b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.587967] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.588197] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.589045] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-124cc08e-fdf2-4704-9d52-fded1bc32bc4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.595319] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 729.595319] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5299148e-97be-f8e9-4137-aa17f34bf5cc" [ 729.595319] env[68279]: _type = "Task" [ 729.595319] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.597451] env[68279]: DEBUG nova.compute.manager [req-9c436c3d-5e63-4dd1-a680-b2bd812a2efb req-5554b158-8407-4f9d-8f6a-b04e7197a71b service nova] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Received event network-vif-deleted-a981d4aa-4af9-4362-9690-4170835dd9b4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 729.610727] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5299148e-97be-f8e9-4137-aa17f34bf5cc, 'name': SearchDatastore_Task, 'duration_secs': 0.01192} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.611689] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71f3e4f5-fabb-4ece-8a67-b1c532e99832 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.627063] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 729.627063] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c80992-b63c-c5be-4b99-6dffa322e6a8" [ 729.627063] env[68279]: _type = "Task" [ 729.627063] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.641548] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c80992-b63c-c5be-4b99-6dffa322e6a8, 'name': SearchDatastore_Task, 'duration_secs': 0.020187} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.641835] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.642122] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. {{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 729.642403] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fd1eaed-4e5b-43fd-8475-e52c93e8e5c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.650968] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 729.650968] env[68279]: value = "task-2962754" [ 729.650968] env[68279]: _type = "Task" [ 729.650968] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.661630] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.663144] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 729.939779] env[68279]: INFO nova.compute.manager [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Rebuilding instance [ 729.957393] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962752, 'name': PowerOffVM_Task, 'duration_secs': 0.394601} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.957636] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.957857] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.958174] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ad583b0-3b45-49ba-afb4-1bba77b5e948 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.994433] env[68279]: DEBUG nova.compute.manager [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 729.995354] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e836225-c69c-4b9e-af9d-fa2b6d88d334 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.004489] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.148044] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40df75c-b027-408e-95c1-d3159e754d80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.158926] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a28646-9798-4cda-aada-71fc62e7ee3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.165670] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962754, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.197547] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba4fe20-49f1-4714-9120-ca2f3d333a93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.205898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a559cd67-5004-48bb-9857-3d0ede393232 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.212035] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.222710] env[68279]: DEBUG nova.compute.provider_tree [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.664045] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962754, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635493} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.664045] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
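[editor's note] The CopyVirtualDisk_Task sequence above ("Waiting for the task ... progress is 77% ... completed successfully") is the standard oslo.vmware task-polling pattern: invoke a vSphere method that returns a task reference, then let the session poll it until vCenter reports completion. A minimal sketch follows; the hostname, credentials, and datastore paths are placeholders, and production callers (for example Nova's ds_util disk copy) also pass datacenter references alongside the source and destination names.

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials; in Nova these come from the
    # [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Invoke a vSphere method that returns a task reference...
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] devstack-image-cache_base/example.vmdk',
        destName='[datastore1] example-instance/example-rescue.vmdk')

    # ...then block while oslo.vmware polls the task, which is what produces
    # the "progress is N%" and "completed successfully" lines in the log.
    session.wait_for_task(task)
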
[ 730.664464] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332cd7b0-d93a-4faa-ab20-da809805b99a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.693072] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.693399] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eed76a43-d77b-4d9c-b044-fcf889c3071b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.714375] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 730.714375] env[68279]: value = "task-2962756" [ 730.714375] env[68279]: _type = "Task" [ 730.714375] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.723544] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962756, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.728612] env[68279]: DEBUG nova.scheduler.client.report [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.013727] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.014078] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f29e019-81b1-4518-a00d-ec33547e5d9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.024009] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 731.024009] env[68279]: value = "task-2962757" 
[ 731.024009] env[68279]: _type = "Task" [ 731.024009] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.033103] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.084873] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.085109] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.085250] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Deleting the datastore file [datastore1] 1d16a5c5-981b-474e-8159-820ac6fcc42d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.085526] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4320f6a1-0b6c-4742-8310-3a75987ecb9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.093554] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for the task: (returnval){ [ 731.093554] env[68279]: value = "task-2962758" [ 731.093554] env[68279]: _type = "Task" [ 731.093554] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.102303] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.225858] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962756, 'name': ReconfigVM_Task, 'duration_secs': 0.482708} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.226212] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.227258] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f1bc67-0998-4f67-8740-24df7a96f43a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.233471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.234067] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 731.254340] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.117s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.255087] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.256710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.798s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.258380] env[68279]: INFO nova.compute.claims [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.268037] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-411afd11-b16d-4259-a6bb-5436bc7928c7 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.284362] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 731.284362] env[68279]: value = "task-2962759" [ 731.284362] env[68279]: _type = "Task" [ 731.284362] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.290891] env[68279]: INFO nova.scheduler.client.report [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted allocations for instance 92786813-f4ab-4ff7-8597-aa1aa90eeb01 [ 731.298654] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962759, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.534088] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962757, 'name': PowerOffVM_Task, 'duration_secs': 0.298314} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.534353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.534576] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.535329] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe51be73-85b2-4cf9-9e2a-a9497ea50748 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.541828] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.542053] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ff20c7d-d9c1-4abe-a002-fcae03980b42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.601239] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.601412] env[68279]: DEBUG 
nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.601586] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.601839] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3e0920a-1513-4eb3-847f-0e3e04833e64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.606550] env[68279]: DEBUG oslo_vmware.api [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Task: {'id': task-2962758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192858} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.607125] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.607314] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.607519] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.607794] env[68279]: INFO nova.compute.manager [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Took 2.20 seconds to destroy the instance on the hypervisor. [ 731.608065] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 731.608265] env[68279]: DEBUG nova.compute.manager [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.608362] env[68279]: DEBUG nova.network.neutron [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.611081] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 731.611081] env[68279]: value = "task-2962761" [ 731.611081] env[68279]: _type = "Task" [ 731.611081] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.619187] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.768687] env[68279]: DEBUG nova.compute.utils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.773533] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.773533] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.795847] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962759, 'name': ReconfigVM_Task, 'duration_secs': 0.183237} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.796134] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.796394] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11464fa9-ff1d-4222-be36-9317d042468a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.805617] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 731.805617] env[68279]: value = "task-2962762" [ 731.805617] env[68279]: _type = "Task" [ 731.805617] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.806123] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1c28480-5a70-428e-9806-d501766ff8c1 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "92786813-f4ab-4ff7-8597-aa1aa90eeb01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.747s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.816371] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962762, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.829596] env[68279]: DEBUG nova.policy [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e35ee3c842384e36a91fb335dd81e98a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fadbbd31a4314d12a378689150d3a24d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 732.099340] env[68279]: DEBUG nova.compute.manager [req-5821cd9b-e662-495d-bb99-35b6b4ecb7e3 req-2c08ba97-0d41-4066-994e-ae466a63b06f service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Received event network-vif-deleted-ba399180-a9aa-4428-8f5a-2ca45969e646 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.099340] env[68279]: INFO nova.compute.manager [req-5821cd9b-e662-495d-bb99-35b6b4ecb7e3 req-2c08ba97-0d41-4066-994e-ae466a63b06f service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Neutron deleted interface ba399180-a9aa-4428-8f5a-2ca45969e646; detaching it from the instance and deleting it from the info cache [ 732.099340] env[68279]: DEBUG nova.network.neutron [req-5821cd9b-e662-495d-bb99-35b6b4ecb7e3 req-2c08ba97-0d41-4066-994e-ae466a63b06f service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.118252] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Successfully created port: ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.124013] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155032} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.124320] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.124542] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.124763] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.273684] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 732.319206] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962762, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.520034] env[68279]: DEBUG nova.network.neutron [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.603891] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90347e7b-0c58-48f7-b9fc-85b015a4c7ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.617678] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793f0b4f-c4f9-4689-9a3c-a69e2d4dbe3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.669259] env[68279]: DEBUG nova.compute.manager [req-5821cd9b-e662-495d-bb99-35b6b4ecb7e3 req-2c08ba97-0d41-4066-994e-ae466a63b06f service nova] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Detach interface failed, port_id=ba399180-a9aa-4428-8f5a-2ca45969e646, reason: Instance 1d16a5c5-981b-474e-8159-820ac6fcc42d could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 732.822208] env[68279]: DEBUG oslo_vmware.api [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962762, 'name': PowerOnVM_Task, 'duration_secs': 0.937303} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.822525] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.827918] env[68279]: DEBUG nova.compute.manager [None req-bc63a244-7194-4a6a-a9f1-3638aad18ac4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.828863] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af814781-e3b6-48df-893e-f7ecfc347ea2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.905556] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb8b6ad-f277-464a-9714-43e261f95569 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.914452] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a60aea-54fb-4117-bc1a-593aae01fa71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.949472] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220548f3-98eb-4464-b2e1-93c0ae4ceee6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.958556] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a3d4f5-ff17-4cee-929b-0129c3189e81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.974223] env[68279]: DEBUG nova.compute.provider_tree [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.022507] env[68279]: INFO nova.compute.manager [-] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Took 1.41 seconds to deallocate network for instance. 
[ 733.172736] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.173094] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.173222] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.173411] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.173560] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.173706] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.173938] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 733.174134] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.174310] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b 
tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.174473] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.174648] env[68279]: DEBUG nova.virt.hardware [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.175540] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1773cb8-61a8-4abc-9d8a-97eaf3a181e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.184875] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e3c62e-b88f-4aee-9925-03ae7a74c82b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.200124] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:24:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c349d19b-7d87-4e7b-b600-7943303ac6e0', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.207529] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.207804] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.208030] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fab4595a-73e5-4e73-8111-31343dd52f28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.229701] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.229701] env[68279]: value = "task-2962763" [ 733.229701] env[68279]: _type = "Task" [ 733.229701] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.238715] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962763, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.284208] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 733.309773] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.310058] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.310225] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.310410] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.310556] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.310704] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.310915] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 733.311084] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.311253] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.311437] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.311586] env[68279]: DEBUG nova.virt.hardware [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.312613] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a45a1a6-f9d9-49a7-92ef-11ccd0fd133c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.321858] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040638cc-3be7-452b-99f9-c8594951fa88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.478134] env[68279]: DEBUG nova.scheduler.client.report [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.525912] env[68279]: DEBUG nova.compute.manager [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-vif-plugged-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 733.526083] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] Acquiring lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.526364] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.527084] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.527084] env[68279]: DEBUG nova.compute.manager [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] No waiting events found dispatching network-vif-plugged-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 733.527646] env[68279]: WARNING nova.compute.manager [req-5a5f2166-53fe-4e5d-b04b-73271f7b6fcf req-1bc92b87-cebf-41e6-9107-bd8fae494d8d service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received unexpected event network-vif-plugged-ad06c5c0-cc93-4b02-968c-9e81681ae50a for instance with vm_state building and task_state spawning. [ 733.533156] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.615780] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Successfully updated port: ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.740981] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962763, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.983363] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.983690] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.986968] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.228s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.986968] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.989138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.587s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.990552] env[68279]: INFO nova.compute.claims [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.016555] env[68279]: INFO nova.scheduler.client.report [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Deleted allocations for instance 010e5bfc-814c-4bde-8a16-7c2009ee13b6 [ 734.119479] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.119706] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.119862] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.241353] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962763, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.497103] env[68279]: DEBUG nova.compute.utils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.499965] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 734.500155] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 734.524301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f62962d8-0edd-4a5b-abd1-a98737de39f2 tempest-ImagesOneServerTestJSON-1233753175 tempest-ImagesOneServerTestJSON-1233753175-project-member] Lock "010e5bfc-814c-4bde-8a16-7c2009ee13b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.625s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.566318] env[68279]: DEBUG nova.policy [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05112d23230b49849a2aad14fd25a041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d74a8e71e1b40cc858089e4af0d4cfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 734.654241] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.743598] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962763, 'name': CreateVM_Task, 'duration_secs': 1.348971} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.743821] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.744599] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.744809] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.745201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 734.745506] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1291b6e1-b202-44ac-ac32-e31ff26fac24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.752847] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 734.752847] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b14b3f-6ecb-5b79-86e9-ab70d91bc3d9" [ 734.752847] env[68279]: _type = "Task" [ 734.752847] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.765122] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b14b3f-6ecb-5b79-86e9-ab70d91bc3d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.881580] env[68279]: DEBUG nova.network.neutron [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.003247] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 735.083284] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Successfully created port: de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.104770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.105173] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.265611] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b14b3f-6ecb-5b79-86e9-ab70d91bc3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.011926} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.266364] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.266714] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.267075] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.267635] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.267923] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.268295] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c7c4eba-75c4-4b27-a09f-1935afd3f3e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.284194] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.284194] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.284194] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2e3fe1-1e48-4428-9070-ced7f0420ca8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.294137] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 735.294137] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c0f5d0-281c-9514-5734-8fc28aa83ed5" [ 735.294137] env[68279]: _type = "Task" [ 735.294137] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.302018] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c0f5d0-281c-9514-5734-8fc28aa83ed5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.385568] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.386059] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Instance network_info: |[{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 735.389136] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 
b2e272b3-520a-4ef7-8141-a9d55739d6b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:96:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad06c5c0-cc93-4b02-968c-9e81681ae50a', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.396638] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Creating folder: Project (fadbbd31a4314d12a378689150d3a24d). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.399400] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1feeeec-5e9d-4329-88b7-facd7d6b8698 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.411359] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Created folder: Project (fadbbd31a4314d12a378689150d3a24d) in parent group-v594445. [ 735.413353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Creating folder: Instances. Parent ref: group-v594552. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.413738] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94e57d38-3aef-4b02-8852-4e0b83576fcf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.444649] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Created folder: Instances in parent group-v594552. [ 735.444649] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.444649] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.444649] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2dc0c77c-0649-4305-b24e-a5d45bb5b849 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.471264] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.471264] env[68279]: value = "task-2962766" [ 735.471264] env[68279]: _type = "Task" [ 735.471264] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.481496] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962766, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.516269] env[68279]: INFO nova.virt.block_device [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Booting with volume b6edfa04-38f3-4e20-9c83-faca792e474a at /dev/sda [ 735.586209] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a00508a-e4d7-47f0-a9bc-422a3395da65 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.598773] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42720d3b-783d-4ced-b63a-9570358f46f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.617200] env[68279]: DEBUG nova.compute.manager [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.617481] env[68279]: DEBUG nova.compute.manager [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing instance network info cache due to event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 735.617763] env[68279]: DEBUG oslo_concurrency.lockutils [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.617956] env[68279]: DEBUG oslo_concurrency.lockutils [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.618189] env[68279]: DEBUG nova.network.neutron [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.651939] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-427eafd0-98c6-48f4-bda2-c652c0438dab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.662424] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c87da96-8758-481c-b0b1-ab9ed73a2f02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.675956] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485a46cd-6933-4f92-8e12-9278e7aeb8a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.685176] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef50b6c7-b5a2-4439-8c8f-69b6e339503a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.729835] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2c35ab-b73c-4ab4-a97a-a29aba23046f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.733072] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7478fa06-6ff3-4fcb-b81a-a8268d2fc862 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.741728] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8772076b-8c98-45f7-9a69-12d76c4da2dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.745341] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b7b209-641f-45dc-9dda-64dbb3143f76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.760852] env[68279]: DEBUG nova.compute.provider_tree [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed in ProviderTree for provider: 
40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.765725] env[68279]: DEBUG nova.virt.block_device [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updating existing volume attachment record: c73d25f2-65b9-4242-a9bc-c82f5e1e26f6 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 735.803664] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c0f5d0-281c-9514-5734-8fc28aa83ed5, 'name': SearchDatastore_Task, 'duration_secs': 0.032391} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.804745] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff144dec-b996-4ca3-a22e-c231cdb6927d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.810545] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 735.810545] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bde7d2-faca-af6e-ea2c-1f70c356b3e9" [ 735.810545] env[68279]: _type = "Task" [ 735.810545] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.818731] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bde7d2-faca-af6e-ea2c-1f70c356b3e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.979424] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962766, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.270043] env[68279]: DEBUG nova.scheduler.client.report [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.325017] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bde7d2-faca-af6e-ea2c-1f70c356b3e9, 'name': SearchDatastore_Task, 'duration_secs': 0.02843} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.325017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.325017] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.325017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ad1e819-7660-4f31-abb0-8dff8479b9e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.331225] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 736.331225] env[68279]: value = "task-2962767" [ 736.331225] env[68279]: _type = "Task" [ 736.331225] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.339443] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962767, 'name': CopyVirtualDisk_Task} progress is 0%. 
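
Note: "Inventory has not changed for provider ... based on inventory data" is the report client comparing the freshly computed inventory against what it already holds for the provider and skipping the placement update when nothing differs. A rough sketch of that comparison on plain dicts (illustrative only, not the actual scheduler report client):

    def inventory_delta(cached: dict, fresh: dict) -> dict:
        """Resource classes whose inventory records differ; an empty dict means
        'Inventory has not changed' and the placement update can be skipped."""
        classes = set(cached) | set(fresh)
        return {rc: fresh.get(rc) for rc in classes if cached.get(rc) != fresh.get(rc)}

    cached = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    }
    fresh = {rc: dict(v) for rc, v in cached.items()}  # recomputed by the resource tracker
    if not inventory_delta(cached, fresh):
        print("Inventory has not changed for provider; skipping update")
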
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.483613] env[68279]: DEBUG nova.network.neutron [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updated VIF entry in instance network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.484149] env[68279]: DEBUG nova.network.neutron [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.488955] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962766, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.777509] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.788s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.778106] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Start building networks asynchronously for instance. 
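
Note: "Updated VIF entry in instance network info cache for port ..." is the neutron event handler replacing one port's record inside the cached network_info list shown above. A small sketch of that update keyed by port id, assuming the same dict layout as the cached JSON (not Nova's NetworkInfo class):

    def update_vif_entry(network_info: list, refreshed_vif: dict) -> list:
        """Replace the cached entry whose 'id' matches the refreshed port,
        or append it when the port is not in the cache yet."""
        port_id = refreshed_vif["id"]
        updated = [vif for vif in network_info if vif["id"] != port_id]
        updated.append(refreshed_vif)
        return updated

    cache = [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "active": False}]
    cache = update_vif_entry(cache, {"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "active": True})
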
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.781015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.173s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.782491] env[68279]: INFO nova.compute.claims [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.842111] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500482} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.842407] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.842817] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.842886] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ac06eaa-e20c-4b48-8ce4-b5e42a80191a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.850697] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 736.850697] env[68279]: value = "task-2962768" [ 736.850697] env[68279]: _type = "Task" [ 736.850697] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.857922] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.984066] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962766, 'name': CreateVM_Task, 'duration_secs': 1.348451} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.984285] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.985029] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.985157] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.985462] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.985711] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b0edc3c-dae8-49a4-a78e-0a22f59c6384 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.992272] env[68279]: DEBUG oslo_concurrency.lockutils [req-7fc1afab-837d-4fd5-af49-399bfc944a9c req-04d73487-f70f-41b1-865d-62137de401c0 service nova] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.992272] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 736.992272] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d673b9-4cfe-06f3-21b2-4e8d65ae9248" [ 736.992272] env[68279]: _type = "Task" [ 736.992272] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.995632] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Successfully updated port: de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.003195] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d673b9-4cfe-06f3-21b2-4e8d65ae9248, 'name': SearchDatastore_Task, 'duration_secs': 0.008859} completed successfully. 
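
Note: the Acquiring/Acquired/Releasing lock lines on "[datastore1] devstack-image-cache_base/..." serialize work on the shared image cache between concurrent spawns in the same process. The same pattern can be written with oslo.concurrency's lock context manager; the lock name mirrors the log, the rest is an illustrative sketch:

    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                        "01e502b7-2447-4972-9fe7-fd69f76ef71f")

    def with_image_cache_lock(populate_cache):
        # Only one worker per process touches this cached image at a time;
        # the Acquiring/Acquired/Releasing log lines bracket this block.
        with lockutils.lock(IMAGE_CACHE_LOCK):
            return populate_cache()
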
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.003475] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.003706] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.003985] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.004095] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.004281] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.004528] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12bc50b6-5a96-4b99-9dd0-684670c9e383 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.012797] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.013026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.013710] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5868a47c-9d58-4af7-bca8-9d574b0d02f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.019233] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 737.019233] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d07eba-a05c-94d0-6265-6ca5a5e21c9c" [ 737.019233] env[68279]: _type = "Task" [ 737.019233] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.026831] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d07eba-a05c-94d0-6265-6ca5a5e21c9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.287487] env[68279]: DEBUG nova.compute.utils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.290803] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 737.290967] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.329424] env[68279]: DEBUG nova.policy [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f72f0e3628e1438a80840cdf82642554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7bf4e6f720045e1854859d2966a887b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 737.359339] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063224} completed successfully. 
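
Note: taken together, the SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries trace the usual spawn path: check whether the base image is already in the datastore cache, create the cache folder if needed, copy the cached VMDK into the instance directory, then extend the root disk. A condensed, toy version of that control flow (a set of paths stands in for the datastore; none of these names are Nova's API):

    def prepare_root_disk(datastore_files: set, image_id: str,
                          instance_uuid: str, root_size: int) -> str:
        """Condensed flow behind the SearchDatastore/MakeDirectory/
        CopyVirtualDisk/ExtendVirtualDisk task sequence in the log."""
        cached = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
        if cached not in datastore_files:     # SearchDatastore_Task: cache miss
            datastore_files.add(cached)       # fetch the image into the cache dir
        datastore_files.add(target)           # CopyVirtualDisk_Task: cache -> instance dir
        print(f"Extending root virtual disk to {root_size}")  # ExtendVirtualDisk_Task
        return target

    files = {"[datastore1] devstack-image-cache_base/"
             "01e502b7-2447-4972-9fe7-fd69f76ef71f/"
             "01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk"}
    prepare_root_disk(files, "01e502b7-2447-4972-9fe7-fd69f76ef71f",
                      "d8eca7ac-744e-469c-9a87-901f0641f4f2", 1048576)
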
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.359624] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.360495] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbcdb76-238a-4691-b52d-c8d02cb5cc11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.382349] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.385173] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cddfcd9b-554b-490d-8c80-d2d005cd4de4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.405096] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 737.405096] env[68279]: value = "task-2962769" [ 737.405096] env[68279]: _type = "Task" [ 737.405096] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.413130] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962769, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.499265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.499265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquired lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.499265] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.532426] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d07eba-a05c-94d0-6265-6ca5a5e21c9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009577} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.533296] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24839991-93e5-45a1-b56c-0075a6fa49f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.541490] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 737.541490] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c17788-cd1d-9658-451a-bfccda7575d7" [ 737.541490] env[68279]: _type = "Task" [ 737.541490] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.550978] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c17788-cd1d-9658-451a-bfccda7575d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.631290] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Successfully created port: dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.700339] env[68279]: DEBUG nova.compute.manager [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Received event network-vif-plugged-de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.700575] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Acquiring lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.700791] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.700958] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.701453] env[68279]: DEBUG nova.compute.manager [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] No waiting events found dispatching network-vif-plugged-de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 737.701453] env[68279]: WARNING nova.compute.manager [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Received unexpected event network-vif-plugged-de55f764-d554-4fcc-bc9d-3987f9c39bc3 for instance with vm_state building and task_state spawning. [ 737.701587] env[68279]: DEBUG nova.compute.manager [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Received event network-changed-de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.701720] env[68279]: DEBUG nova.compute.manager [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Refreshing instance network info cache due to event network-changed-de55f764-d554-4fcc-bc9d-3987f9c39bc3. 
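
Note: the "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." / "Received unexpected event ..." sequence is the handshake between the thread spawning the instance (which registers the events it intends to wait for) and the handler for Neutron's external events (which pops and signals them, or logs a warning when nothing is waiting). A minimal sketch of that waiter/dispatcher registry, using threading primitives (not Nova's implementation, which runs on greenthreads):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev            # spawner later calls ev.wait(timeout)

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

        def dispatch(self, instance_uuid, event_name):
            ev = self.pop_instance_event(instance_uuid, event_name)
            if ev is None:
                print(f"Received unexpected event {event_name} for instance {instance_uuid}")
            else:
                ev.set()         # unblocks the waiting spawner
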
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.701819] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Acquiring lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.791927] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.902351] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.902847] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.903062] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.903214] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.903383] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.903519] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.903655] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.903845] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.903991] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.904188] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.904343] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.904504] env[68279]: DEBUG nova.virt.hardware [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.906938] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f198425-7d3f-4d37-a32f-1f8b14e6d683 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.925503] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.932105] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a8dd93-a0b1-401c-98ef-326467c7749a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.051920] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c17788-cd1d-9658-451a-bfccda7575d7, 'name': SearchDatastore_Task, 'duration_secs': 0.024133} completed successfully. 
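
Note: the nova.virt.hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate the (sockets, cores, threads) splits of the flavor's vCPU count that fit within the limits. A worked sketch of that enumeration, assuming the simple rule that the three factors must multiply to the vCPU count (the real code applies more preferences than this):

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """(sockets, cores, threads) splits whose product equals vcpus and
        which stay within the given limits."""
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        topologies = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- one possible topology, as logged
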
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.054464] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.054734] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b2e272b3-520a-4ef7-8141-a9d55739d6b9/b2e272b3-520a-4ef7-8141-a9d55739d6b9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.055235] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ce1fb42-fd7b-4786-833f-378e69d022a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.061619] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 738.061619] env[68279]: value = "task-2962770" [ 738.061619] env[68279]: _type = "Task" [ 738.061619] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.069744] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.077184] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.362859] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f92bad3-1f3d-42a3-b54e-65afd8a60792 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.372666] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cce0dd-101a-4374-94a8-72150e06215b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.405058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf28312-1c1d-4f7f-b9eb-d359f21445e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.419301] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b65187-5a8b-4f15-bc46-35ef8a0dbcf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.426745] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962769, 'name': ReconfigVM_Task, 'duration_secs': 0.774214} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.427464] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Reconfigured VM instance instance-00000022 to attach disk [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2/d8eca7ac-744e-469c-9a87-901f0641f4f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.428169] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15079c3b-fe37-4984-8936-a26665b96d96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.439069] env[68279]: DEBUG nova.compute.provider_tree [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.446690] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 738.446690] env[68279]: value = "task-2962771" [ 738.446690] env[68279]: _type = "Task" [ 738.446690] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.456836] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962771, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.575068] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496709} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.575338] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] b2e272b3-520a-4ef7-8141-a9d55739d6b9/b2e272b3-520a-4ef7-8141-a9d55739d6b9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.575540] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.575781] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14ba4e0a-66a5-4f85-87da-fc26eb121b0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.582063] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 738.582063] env[68279]: value = "task-2962772" [ 738.582063] env[68279]: _type = "Task" [ 738.582063] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.590687] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962772, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.671113] env[68279]: DEBUG nova.network.neutron [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updating instance_info_cache with network_info: [{"id": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "address": "fa:16:3e:a4:ff:b3", "network": {"id": "32f715fb-b537-4afb-925c-5e538945a3e3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2057410646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d74a8e71e1b40cc858089e4af0d4cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde55f764-d5", "ovs_interfaceid": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.808185] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.838175] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.838175] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.838175] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.838175] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.838175] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.838667] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.839665] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.843033] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.843033] env[68279]: DEBUG nova.virt.hardware [None 
req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.843033] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.843033] env[68279]: DEBUG nova.virt.hardware [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.843033] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188cb521-7c40-40da-bcc7-babc416879a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.851055] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec84a9e-a399-4cb9-b6fd-3f2b073bef9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.942574] env[68279]: DEBUG nova.scheduler.client.report [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.958668] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962771, 'name': Rename_Task, 'duration_secs': 0.212286} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.959154] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.959459] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b49873b8-2227-4b23-a145-8ef0e8b57ee3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.967460] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 738.967460] env[68279]: value = "task-2962773" [ 738.967460] env[68279]: _type = "Task" [ 738.967460] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.977213] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962773, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.093641] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067184} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.093641] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.094042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd76ca4-b1b5-4c80-af0a-e453a2e7ad7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.128013] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] b2e272b3-520a-4ef7-8141-a9d55739d6b9/b2e272b3-520a-4ef7-8141-a9d55739d6b9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.128013] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b348a2a0-78ab-4ae6-8aca-96b4b4df6e58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.154412] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 739.154412] env[68279]: value = "task-2962774" [ 739.154412] env[68279]: _type = "Task" [ 739.154412] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.165311] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962774, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.173997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Releasing lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.174268] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Instance network_info: |[{"id": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "address": "fa:16:3e:a4:ff:b3", "network": {"id": "32f715fb-b537-4afb-925c-5e538945a3e3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2057410646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d74a8e71e1b40cc858089e4af0d4cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde55f764-d5", "ovs_interfaceid": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.174589] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Acquired lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.174802] env[68279]: DEBUG nova.network.neutron [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Refreshing network info cache for port de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.176031] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:ff:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4b43a78-f49b-4132-ab2e-6e28769a9498', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de55f764-d554-4fcc-bc9d-3987f9c39bc3', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.187368] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 
tempest-ServersTestBootFromVolume-1615053590-project-member] Creating folder: Project (3d74a8e71e1b40cc858089e4af0d4cfa). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.187785] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0fe858a-1c5a-4fda-a312-3f59fc7f6d08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.204184] env[68279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 739.205264] env[68279]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 739.205264] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Folder already exists: Project (3d74a8e71e1b40cc858089e4af0d4cfa). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 739.205264] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Creating folder: Instances. Parent ref: group-v594483. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.205264] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eeea853a-b465-4c88-9470-10dbe081eb1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.218595] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Created folder: Instances in parent group-v594483. [ 739.218595] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.218595] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.218595] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59913ce6-6b48-4bc7-a9fd-a42b772fe79b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.240343] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.240343] env[68279]: value = "task-2962777" [ 739.240343] env[68279]: _type = "Task" [ 739.240343] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.249811] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962777, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.262238] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Successfully updated port: dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.450051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.450051] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 739.454231] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.433s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.454629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.456217] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.100s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.457851] env[68279]: INFO nova.compute.claims [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.478352] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962773, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.494893] env[68279]: INFO nova.scheduler.client.report [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Deleted allocations for instance 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9 [ 739.534410] env[68279]: DEBUG nova.network.neutron [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updated VIF entry in instance network info cache for port de55f764-d554-4fcc-bc9d-3987f9c39bc3. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.534764] env[68279]: DEBUG nova.network.neutron [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updating instance_info_cache with network_info: [{"id": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "address": "fa:16:3e:a4:ff:b3", "network": {"id": "32f715fb-b537-4afb-925c-5e538945a3e3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2057410646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d74a8e71e1b40cc858089e4af0d4cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde55f764-d5", "ovs_interfaceid": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.668294] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962774, 'name': ReconfigVM_Task, 'duration_secs': 0.344541} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.668294] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Reconfigured VM instance instance-00000023 to attach disk [datastore1] b2e272b3-520a-4ef7-8141-a9d55739d6b9/b2e272b3-520a-4ef7-8141-a9d55739d6b9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.669237] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6ed88c3-4f41-49a5-a082-6dfcf6ca253f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.675737] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 739.675737] env[68279]: value = "task-2962778" [ 739.675737] env[68279]: _type = "Task" [ 739.675737] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.685316] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962778, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.752693] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962777, 'name': CreateVM_Task, 'duration_secs': 0.366691} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.755739] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.756286] env[68279]: DEBUG nova.compute.manager [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received event network-vif-plugged-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.756641] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Acquiring lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.756988] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.758261] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.758667] env[68279]: DEBUG nova.compute.manager [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] No waiting events found dispatching network-vif-plugged-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.758943] env[68279]: WARNING nova.compute.manager [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received unexpected event network-vif-plugged-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 for instance with vm_state building and task_state spawning. [ 739.759066] env[68279]: DEBUG nova.compute.manager [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.759172] env[68279]: DEBUG nova.compute.manager [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing instance network info cache due to event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 739.759392] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Acquiring lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.759517] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Acquired lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.759748] env[68279]: DEBUG nova.network.neutron [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing network info cache for port dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.761662] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594489', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'name': 'volume-b6edfa04-38f3-4e20-9c83-faca792e474a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5565f0d-ed60-4ac8-bba1-ab46b337dd90', 'attached_at': '', 'detached_at': '', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'serial': 'b6edfa04-38f3-4e20-9c83-faca792e474a'}, 'attachment_id': 'c73d25f2-65b9-4242-a9bc-c82f5e1e26f6', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=68279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 739.761662] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Root volume attach. 
Driver type: vmdk {{(pid=68279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 739.763394] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.763817] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ddcbd7-e850-4778-95b5-1633b36630fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.775306] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdabf49-9131-42d6-90c7-f17589747546 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.784871] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615fbe5d-eb42-4cca-a013-ab5c8aeccda0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.797208] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2a77ef05-d948-45e8-b7ba-2fa891c8985f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.805724] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 739.805724] env[68279]: value = "task-2962779" [ 739.805724] env[68279]: _type = "Task" [ 739.805724] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.815880] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962779, 'name': RelocateVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.962085] env[68279]: DEBUG nova.compute.utils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 739.965397] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 739.965647] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 739.982384] env[68279]: DEBUG oslo_vmware.api [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962773, 'name': PowerOnVM_Task, 'duration_secs': 0.820524} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.982677] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.982881] env[68279]: DEBUG nova.compute.manager [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.983959] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b371db-3d40-429d-b418-2b323a561684 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.010253] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61c2197b-f35c-451c-a301-44f168c5d834 tempest-ServersNegativeTestMultiTenantJSON-1997436027 tempest-ServersNegativeTestMultiTenantJSON-1997436027-project-member] Lock "6e947ed2-a6aa-42d4-b97e-31db33f6d5f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.386s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.037461] env[68279]: DEBUG oslo_concurrency.lockutils [req-dcd71487-8c76-467a-85f3-16deadef12e4 req-84d6df2e-9b65-4596-bc1a-973f62decced service nova] Releasing lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.116299] env[68279]: DEBUG nova.policy [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eea669f41a64511ad4a642e0501426e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baf34b7de826460bbfd545770af9e534', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 740.194563] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 
tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962778, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.299255] env[68279]: DEBUG nova.network.neutron [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.325079] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962779, 'name': RelocateVM_Task} progress is 20%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.407519] env[68279]: DEBUG nova.network.neutron [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.474048] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 740.511806] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.693964] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962778, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.827236] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962779, 'name': RelocateVM_Task, 'duration_secs': 0.710362} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.827484] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 740.827763] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594489', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'name': 'volume-b6edfa04-38f3-4e20-9c83-faca792e474a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5565f0d-ed60-4ac8-bba1-ab46b337dd90', 'attached_at': '', 'detached_at': '', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'serial': 'b6edfa04-38f3-4e20-9c83-faca792e474a'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 740.828964] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d0f1de-5cdd-46b9-a9c1-5bc956849e2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.853744] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e044e1e-bda2-4295-84dd-0ed2695f75b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.880455] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-b6edfa04-38f3-4e20-9c83-faca792e474a/volume-b6edfa04-38f3-4e20-9c83-faca792e474a.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.884711] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a99f962a-a1f3-4376-b8d6-d3404736aef0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.904522] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 740.904522] env[68279]: value = "task-2962780" [ 740.904522] env[68279]: _type = "Task" [ 740.904522] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.912957] env[68279]: DEBUG oslo_concurrency.lockutils [req-d817b98a-651b-43b8-ac81-2ede31c1ef38 req-d5236ba3-d7b0-453e-87fc-34f595989e86 service nova] Releasing lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.913577] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962780, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.914831] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.914831] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.129769] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Successfully created port: f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.142527] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851ab0f3-0f9b-4927-9961-7f6d83ca0365 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.149566] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0c1224-44e3-44a7-bdfc-6420d01fd12a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.182683] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da53bb2-fcc7-491a-9f48-6a5945dcd36f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.192510] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc20d165-771d-4b43-827a-1231c2a6b506 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.196197] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962778, 'name': Rename_Task, 'duration_secs': 1.151331} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.196537] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.197030] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aaef25d-412a-4c8b-bf37-60d50226d09b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.211021] env[68279]: DEBUG nova.compute.provider_tree [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.214205] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 741.214205] env[68279]: value = "task-2962781" [ 741.214205] env[68279]: _type = "Task" [ 741.214205] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.221895] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962781, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.271958] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.272268] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.272754] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.273015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.273227] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.277921] env[68279]: INFO nova.compute.manager [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Terminating instance [ 741.415569] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962780, 'name': ReconfigVM_Task, 'duration_secs': 0.24218} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.415870] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-b6edfa04-38f3-4e20-9c83-faca792e474a/volume-b6edfa04-38f3-4e20-9c83-faca792e474a.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.423034] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a5ec771-d425-4eb1-8d80-6b0ed526f43c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.440886] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 741.440886] env[68279]: value = "task-2962782" [ 741.440886] env[68279]: _type = "Task" [ 741.440886] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.449684] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.466586] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.486089] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 741.517858] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.519128] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.519128] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.519128] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.519128] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.519128] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.519343] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.519929] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.519929] env[68279]: DEBUG nova.virt.hardware [None 
req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.520096] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.521016] env[68279]: DEBUG nova.virt.hardware [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.521883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70a3575-62d5-4266-b960-2d8c7985ff03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.536127] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cce2a5-f77e-47de-8a8c-5b2be29b1290 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.634511] env[68279]: DEBUG nova.network.neutron [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [{"id": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "address": "fa:16:3e:c6:15:27", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdccc2829-54", "ovs_interfaceid": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.714021] env[68279]: DEBUG nova.scheduler.client.report [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.729280] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962781, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.786025] env[68279]: DEBUG nova.compute.manager [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 741.786025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 741.786025] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956493cf-8d18-4e1a-a07e-4b304310d738 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.795413] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 741.795984] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c94f4e78-261c-43e4-9fc4-70c0b0249d8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.805455] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 741.805455] env[68279]: value = "task-2962783" [ 741.805455] env[68279]: _type = "Task" [ 741.805455] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.815478] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.954765] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962782, 'name': ReconfigVM_Task, 'duration_secs': 0.261044} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.955084] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594489', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'name': 'volume-b6edfa04-38f3-4e20-9c83-faca792e474a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5565f0d-ed60-4ac8-bba1-ab46b337dd90', 'attached_at': '', 'detached_at': '', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'serial': 'b6edfa04-38f3-4e20-9c83-faca792e474a'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 741.955632] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40687ce7-eee3-4322-9466-ea94b0fd5fc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.962882] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 741.962882] env[68279]: value = "task-2962784" [ 741.962882] env[68279]: _type = "Task" [ 741.962882] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.977324] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962784, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.132457] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "4021edd3-346e-44e5-9419-38181cc91c6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.132697] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.137502] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.137794] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance network_info: |[{"id": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "address": "fa:16:3e:c6:15:27", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdccc2829-54", "ovs_interfaceid": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 742.138178] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:15:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dccc2829-5441-46f6-8b0a-fbfa005d0fa5', 'vif_model': 'vmxnet3'}] {{(pid=68279) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.145562] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.146042] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.146271] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-940060e4-e3e3-4a44-94de-567b05e4698b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.165535] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.165535] env[68279]: value = "task-2962785" [ 742.165535] env[68279]: _type = "Task" [ 742.165535] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.173601] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962785, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.225609] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.225609] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 742.230669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.909s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.230669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.232425] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.415s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.233891] env[68279]: INFO nova.compute.claims [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.236592] env[68279]: DEBUG oslo_vmware.api [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2962781, 'name': PowerOnVM_Task, 'duration_secs': 0.912223} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.237450] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.240016] env[68279]: INFO nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Took 8.95 seconds to spawn the instance on the hypervisor. 
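Note: the records above follow the usual oslo.vmware request/poll cycle — a vCenter *_Task method (here CreateVM_Task) is invoked through the SOAP service, wait_for_task logs "Waiting for the task: (returnval){ ... }", and _poll_task keeps reporting progress until the task reaches a terminal state. A minimal sketch of that polling loop follows; it is illustrative only, and the get_task_info callable and its fields are assumptions rather than the actual oslo.vmware API.

import time

POLL_INTERVAL = 0.5  # oslo.vmware exposes this as a configurable task poll interval


def wait_for_task(get_task_info, task_ref, timeout=300):
    """Poll a vCenter task until it succeeds or fails.

    get_task_info is an assumed callable returning an object with .state
    ('queued', 'running', 'success', 'error'), .progress, .result and .error,
    mirroring what the log's _poll_task lines report (e.g. "progress is 66%").
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        # Corresponds to records such as "Task: {'id': task-2962785, ...} progress is 0%."
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")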
[ 742.240016] env[68279]: DEBUG nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.240016] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc111f9-d70c-4c17-8f13-413e44e8c90c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.286030] env[68279]: INFO nova.scheduler.client.report [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted allocations for instance bf4e6484-d17d-4244-9163-1ef0012874b8 [ 742.321730] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962783, 'name': PowerOffVM_Task, 'duration_secs': 0.29374} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.321868] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 742.322037] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.325026] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8fa1c32-8a32-4521-83a5-aedfc384913a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.398448] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.398714] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.398899] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore1] d8eca7ac-744e-469c-9a87-901f0641f4f2 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.401235] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-a0e0955b-0059-4373-a095-9c5265ef0d1a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.408858] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 742.408858] env[68279]: value = "task-2962787" [ 742.408858] env[68279]: _type = "Task" [ 742.408858] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.421467] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962787, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.473675] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962784, 'name': Rename_Task, 'duration_secs': 0.152939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.474693] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.474939] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb3fdaf0-bfb9-4bea-bd42-c7888a9eb1b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.484357] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 742.484357] env[68279]: value = "task-2962788" [ 742.484357] env[68279]: _type = "Task" [ 742.484357] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.492509] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.676951] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962785, 'name': CreateVM_Task, 'duration_secs': 0.371583} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.678080] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.678682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.678772] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.679206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 742.681615] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71c216c2-ae82-418b-b7c1-972a91f4298d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.686316] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 742.686316] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52384258-8857-3ca0-963c-4d56991921ab" [ 742.686316] env[68279]: _type = "Task" [ 742.686316] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.695182] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52384258-8857-3ca0-963c-4d56991921ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.738798] env[68279]: DEBUG nova.compute.utils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.744494] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Not allocating networking since 'none' was specified. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 742.757827] env[68279]: INFO nova.compute.manager [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Took 45.77 seconds to build instance. [ 742.793199] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a1013779-8676-4fb2-b1f9-e83eee7854dc tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "bf4e6484-d17d-4244-9163-1ef0012874b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.295s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.920712] env[68279]: DEBUG oslo_vmware.api [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181118} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.921015] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.921743] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.922017] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.922300] env[68279]: INFO nova.compute.manager [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 742.922687] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.922983] env[68279]: DEBUG nova.compute.manager [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 742.923144] env[68279]: DEBUG nova.network.neutron [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.992642] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962788, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.115022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "c62a0d0e-8869-482a-a687-c628b96d6e22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.115272] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.198331] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52384258-8857-3ca0-963c-4d56991921ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.198711] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.198994] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.199326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.199563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.199852] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.200227] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eb18c09-7716-4283-a72b-17aed3630c55 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.213593] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.213823] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.214876] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e5f2dc-4f96-4caf-bad3-c9b84727e5bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.221433] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 743.221433] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be1a80-d1c0-df48-cb5b-96996bf41502" [ 743.221433] env[68279]: _type = "Task" [ 743.221433] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.230302] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be1a80-d1c0-df48-cb5b-96996bf41502, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.241844] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.259538] env[68279]: DEBUG oslo_concurrency.lockutils [None req-edce196e-ace8-48de-bb17-2efcfff7563a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.397s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.496078] env[68279]: DEBUG oslo_vmware.api [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2962788, 'name': PowerOnVM_Task, 'duration_secs': 0.76996} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.499321] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.499626] env[68279]: INFO nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Took 5.60 seconds to spawn the instance on the hypervisor. 
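Note: many of the surrounding records come from oslo.concurrency's lockutils, which serializes callers on named semaphores and logs how long each one waited for and then held the lock (for example, Lock "compute_resources" acquired ... waited 26.415s and "released" ... held 2.766s). A minimal usage sketch follows, assuming only the public lockutils API; the functions below and their bodies are invented for illustration and are not Nova code.

from oslo_concurrency import lockutils


# Decorator form: all callers of claim_resources() serialize on the same
# named semaphore, producing acquired/released records like those in the log.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, vcpus, memory_mb):
    print(f"claiming {vcpus} vCPU / {memory_mb} MiB for {instance_uuid}")


# Context-manager form, e.g. for per-instance network cache refreshes.
def refresh_network_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"rebuilding network info cache for {instance_uuid}")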
[ 743.499989] env[68279]: DEBUG nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.501243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd69abb-5d54-4f50-8d4c-d10bf31a0770 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.552564] env[68279]: DEBUG nova.compute.manager [req-a40a0385-01cc-4b11-b6c9-176b03cd8f10 req-6abbfa07-a309-4778-9193-4293fc03fced service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Received event network-vif-deleted-c349d19b-7d87-4e7b-b600-7943303ac6e0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.552564] env[68279]: INFO nova.compute.manager [req-a40a0385-01cc-4b11-b6c9-176b03cd8f10 req-6abbfa07-a309-4778-9193-4293fc03fced service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Neutron deleted interface c349d19b-7d87-4e7b-b600-7943303ac6e0; detaching it from the instance and deleting it from the info cache [ 743.552564] env[68279]: DEBUG nova.network.neutron [req-a40a0385-01cc-4b11-b6c9-176b03cd8f10 req-6abbfa07-a309-4778-9193-4293fc03fced service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.574441] env[68279]: DEBUG nova.compute.manager [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Received event network-vif-plugged-f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.574979] env[68279]: DEBUG oslo_concurrency.lockutils [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] Acquiring lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.575724] env[68279]: DEBUG oslo_concurrency.lockutils [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.576223] env[68279]: DEBUG oslo_concurrency.lockutils [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.576902] env[68279]: DEBUG nova.compute.manager [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] No waiting events found dispatching network-vif-plugged-f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 743.576902] env[68279]: WARNING nova.compute.manager [req-0fa6e795-6a7b-435e-b141-facd1cf69b5f req-ef5fc3ee-4818-4f69-8a99-074ea189b717 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Received unexpected event network-vif-plugged-f572d86a-c060-4562-9aee-88a99349e45a for instance with vm_state building and task_state spawning. [ 743.714691] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Successfully updated port: f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.739426] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be1a80-d1c0-df48-cb5b-96996bf41502, 'name': SearchDatastore_Task, 'duration_secs': 0.01267} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.744926] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52e2c592-fb61-482a-8892-8c18c5dfcce9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.754732] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 743.754732] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237df1a-2e38-f1bf-a3a5-28aee4b88f10" [ 743.754732] env[68279]: _type = "Task" [ 743.754732] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.762235] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.768341] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237df1a-2e38-f1bf-a3a5-28aee4b88f10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.852044] env[68279]: DEBUG nova.network.neutron [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.876130] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b21a69-504e-48b8-b03a-ea439f9f049d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.885918] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcd355a-453d-4c1f-9b92-167a56a47a3a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.926152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fd4635-6347-41b8-b4a1-ac805b4bff7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.934673] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d70f0c-ab15-4920-82c7-d4d9adf8fe62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.948616] env[68279]: DEBUG nova.compute.provider_tree [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.029451] env[68279]: INFO nova.compute.manager [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Took 43.60 seconds to build instance. [ 744.057935] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2316b3ae-4946-4a45-a0dc-d586f893034a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.067614] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aec1970-dd7d-4696-b157-d379672efd53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.107712] env[68279]: DEBUG nova.compute.manager [req-a40a0385-01cc-4b11-b6c9-176b03cd8f10 req-6abbfa07-a309-4778-9193-4293fc03fced service nova] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Detach interface failed, port_id=c349d19b-7d87-4e7b-b600-7943303ac6e0, reason: Instance d8eca7ac-744e-469c-9a87-901f0641f4f2 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 744.218134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.218250] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquired lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.218470] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.257280] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.283329] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5237df1a-2e38-f1bf-a3a5-28aee4b88f10, 'name': SearchDatastore_Task, 'duration_secs': 0.011439} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.288280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.288653] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4e157792-f910-492c-ab29-dd3f86cb96a8/4e157792-f910-492c-ab29-dd3f86cb96a8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.290483] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e119db8a-c514-47ea-b071-df200d9d4251 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.303227] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 744.303227] env[68279]: value = "task-2962789" [ 744.303227] env[68279]: _type = "Task" [ 744.303227] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.306775] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.307123] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.307201] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.307380] env[68279]: DEBUG nova.virt.hardware [None 
req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.307521] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.307662] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.307861] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.308023] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.308227] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.308413] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.308691] env[68279]: DEBUG nova.virt.hardware [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.309746] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a28737-5634-41dc-aaf2-300fdee98770 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.316767] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.322842] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962789, 'name': 
CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.326215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb997fd2-9212-4264-b863-f0ed732ed025 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.345253] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.350911] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Creating folder: Project (6cad78ac69ab427bbba9070d6eccb36f). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.352152] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbe7eb7c-281e-43b6-a24d-7006547246f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.354137] env[68279]: INFO nova.compute.manager [-] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Took 1.43 seconds to deallocate network for instance. [ 744.361414] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Created folder: Project (6cad78ac69ab427bbba9070d6eccb36f) in parent group-v594445. [ 744.361580] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Creating folder: Instances. Parent ref: group-v594558. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.362028] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0ccd41d-18a9-4ec7-8fee-94f2e9d5a68b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.373057] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Created folder: Instances in parent group-v594558. [ 744.374027] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 744.374027] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 744.374027] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9ace9c4-f241-4d30-ba81-73cbbbc224c4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.392243] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 744.392243] env[68279]: value = "task-2962792" [ 744.392243] env[68279]: _type = "Task" [ 744.392243] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.400958] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962792, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.452263] env[68279]: DEBUG nova.scheduler.client.report [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.534712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f77f14e3-b47a-44ae-bdc9-aa34ded754c6 tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.718s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.806658] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.814836] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4768} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.815254] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4e157792-f910-492c-ab29-dd3f86cb96a8/4e157792-f910-492c-ab29-dd3f86cb96a8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.817479] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.817479] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46d987ff-3763-4e8e-92db-2bedac6f432f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.825064] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 744.825064] env[68279]: value = "task-2962793" [ 744.825064] env[68279]: _type = "Task" [ 744.825064] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.832648] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962793, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.867569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.902788] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962792, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.961706] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.962258] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.964752] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.588s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.966147] env[68279]: INFO nova.compute.claims [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.035826] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.231688] env[68279]: DEBUG nova.network.neutron [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Updating instance_info_cache with network_info: [{"id": "f572d86a-c060-4562-9aee-88a99349e45a", "address": "fa:16:3e:17:28:0c", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf572d86a-c0", "ovs_interfaceid": "f572d86a-c060-4562-9aee-88a99349e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.333684] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962793, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066526} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.337018] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.337018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa8b99a-93c3-40d9-bb2e-b6a6af22a8b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.361430] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 4e157792-f910-492c-ab29-dd3f86cb96a8/4e157792-f910-492c-ab29-dd3f86cb96a8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.361941] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-138d1910-19ab-470a-a23d-cd478b3e39de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.383415] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 745.383415] env[68279]: value = "task-2962794" [ 745.383415] env[68279]: _type = "Task" [ 745.383415] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.391880] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.403102] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962792, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.471799] env[68279]: DEBUG nova.compute.utils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.475605] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.475777] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.567127] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.577539] env[68279]: DEBUG nova.policy [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a747e97938fc4ae9a870d65b4a82b0f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66f2613477b84e80a089465581fa6af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.719438] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.719691] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.737134] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Releasing lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.737291] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Instance network_info: |[{"id": "f572d86a-c060-4562-9aee-88a99349e45a", "address": "fa:16:3e:17:28:0c", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf572d86a-c0", "ovs_interfaceid": "f572d86a-c060-4562-9aee-88a99349e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.737721] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:28:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f572d86a-c060-4562-9aee-88a99349e45a', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.748884] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Creating folder: Project (baf34b7de826460bbfd545770af9e534). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.749514] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c8d2007-040c-4c7f-87f1-3d10fc513f56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.767116] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Created folder: Project (baf34b7de826460bbfd545770af9e534) in parent group-v594445. [ 745.767345] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Creating folder: Instances. Parent ref: group-v594561. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.767630] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0fcda8c-466f-4d31-b6c6-9e543d17366b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.776855] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Created folder: Instances in parent group-v594561. 
[ 745.777123] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.777327] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.777531] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fcfb15f-9378-4f23-81ab-49b37ed25869 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.800892] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.800892] env[68279]: value = "task-2962797" [ 745.800892] env[68279]: _type = "Task" [ 745.800892] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.810844] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962797, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.894759] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962794, 'name': ReconfigVM_Task, 'duration_secs': 0.295074} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.899810] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 4e157792-f910-492c-ab29-dd3f86cb96a8/4e157792-f910-492c-ab29-dd3f86cb96a8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.899810] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a538526-1c97-42e5-be21-c247ef8e91d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.905950] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962792, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.907611] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 745.907611] env[68279]: value = "task-2962798" [ 745.907611] env[68279]: _type = "Task" [ 745.907611] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.931537] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962798, 'name': Rename_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.976490] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.153263] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.153612] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.314503] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962797, 'name': CreateVM_Task, 'duration_secs': 0.383823} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.317343] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.318450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.318450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.319125] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.319125] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41418149-9f5c-4b62-a711-b1970ba0dbf0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.324788] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e 
tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 746.324788] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52006c60-2955-eef7-4a83-eaf02320d466" [ 746.324788] env[68279]: _type = "Task" [ 746.324788] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.335527] env[68279]: DEBUG nova.compute.manager [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Received event network-changed-f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.335709] env[68279]: DEBUG nova.compute.manager [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Refreshing instance network info cache due to event network-changed-f572d86a-c060-4562-9aee-88a99349e45a. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 746.337032] env[68279]: DEBUG oslo_concurrency.lockutils [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] Acquiring lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.337032] env[68279]: DEBUG oslo_concurrency.lockutils [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] Acquired lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.337032] env[68279]: DEBUG nova.network.neutron [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Refreshing network info cache for port f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.337608] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52006c60-2955-eef7-4a83-eaf02320d466, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.383272] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Successfully created port: 5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.406773] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962792, 'name': CreateVM_Task, 'duration_secs': 1.559557} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.407142] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.407654] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.417257] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962798, 'name': Rename_Task, 'duration_secs': 0.162739} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.420137] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.420658] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3117f69a-7eae-437e-b9a2-d6d20d578bde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.427946] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 746.427946] env[68279]: value = "task-2962799" [ 746.427946] env[68279]: _type = "Task" [ 746.427946] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.441111] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962799, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.670662] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7db84d-c7d0-4fe5-9f9b-d0c88570d302 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.680668] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e632fcc-7182-49b7-9417-1a26fbea82a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.714140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6899bb27-c995-4961-84f7-22899e18c36b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.722416] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb209e4a-ac74-49ba-9d99-5fe7da68727c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.736836] env[68279]: DEBUG nova.compute.provider_tree [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.836355] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52006c60-2955-eef7-4a83-eaf02320d466, 'name': SearchDatastore_Task, 'duration_secs': 0.010254} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.836355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.836355] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.836355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.836355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.836355] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.837063] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.837063] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.837063] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fed799b-30e1-4853-bac2-6da30abb82ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.838921] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a429dc05-7cc3-43aa-bcc7-81a8971d4ab7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.846072] env[68279]: DEBUG oslo_vmware.api [None 
req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 746.846072] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c172d7-0e45-7995-a497-9d8543b12c78" [ 746.846072] env[68279]: _type = "Task" [ 746.846072] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.850914] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.851111] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.851816] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fde6bd7b-13c4-421b-9208-06c7b72f1293 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.861517] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c172d7-0e45-7995-a497-9d8543b12c78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.862784] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 746.862784] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52840469-ba6b-eb23-305a-5913cbe3d8a7" [ 746.862784] env[68279]: _type = "Task" [ 746.862784] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.871571] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52840469-ba6b-eb23-305a-5913cbe3d8a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.939561] env[68279]: DEBUG oslo_vmware.api [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962799, 'name': PowerOnVM_Task, 'duration_secs': 0.464019} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.939838] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.940040] env[68279]: INFO nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Took 8.13 seconds to spawn the instance on the hypervisor. [ 746.940219] env[68279]: DEBUG nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.941014] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ec7692-f608-4077-9376-e77e7a857fe9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.990149] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 
tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.021219] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.021865] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 747.022168] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.022550] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.022837] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.023188] env[68279]: DEBUG nova.virt.hardware [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.024133] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc33dfcf-5091-4a80-93a6-5b3d9c7f3633 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.033269] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7648485b-e25a-4fb7-aaad-8aee71751cd2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.083944] env[68279]: DEBUG nova.network.neutron [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Updated VIF entry in instance network info cache for port f572d86a-c060-4562-9aee-88a99349e45a. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.083944] env[68279]: DEBUG nova.network.neutron [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Updating instance_info_cache with network_info: [{"id": "f572d86a-c060-4562-9aee-88a99349e45a", "address": "fa:16:3e:17:28:0c", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf572d86a-c0", "ovs_interfaceid": "f572d86a-c060-4562-9aee-88a99349e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.243650] env[68279]: DEBUG nova.scheduler.client.report [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.364573] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c172d7-0e45-7995-a497-9d8543b12c78, 'name': SearchDatastore_Task, 'duration_secs': 0.01843} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.369450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.369450] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.369450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.375807] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52840469-ba6b-eb23-305a-5913cbe3d8a7, 'name': SearchDatastore_Task, 'duration_secs': 0.008821} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.376913] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f318e37-2334-4163-94e8-bddf87e448ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.383597] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 747.383597] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d5f5cb-3275-7ea7-5a05-12a2583bcb93" [ 747.383597] env[68279]: _type = "Task" [ 747.383597] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.393112] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d5f5cb-3275-7ea7-5a05-12a2583bcb93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.458912] env[68279]: INFO nova.compute.manager [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Took 42.08 seconds to build instance. 
[ 747.585868] env[68279]: DEBUG oslo_concurrency.lockutils [req-eeb9399e-a41a-4863-a598-7144be0d4741 req-12b97bf0-4ae1-4845-8417-2fb4c8832a96 service nova] Releasing lock "refresh_cache-33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.751437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.751955] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.754948] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.871s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.756334] env[68279]: INFO nova.compute.claims [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.899693] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d5f5cb-3275-7ea7-5a05-12a2583bcb93, 'name': SearchDatastore_Task, 'duration_secs': 0.015936} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.905017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.905017] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f/33f3fc4a-319b-4dd9-90b5-05ee5483ac7f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.905017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.905017] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.905017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58386a7a-cada-4493-82e6-3e65f4b498ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.906256] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3f1a2d2-d835-430c-8acb-945fad58a259 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.913917] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 747.913917] env[68279]: value = "task-2962800" [ 747.913917] env[68279]: _type = "Task" [ 747.913917] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.918338] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.918656] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 747.919787] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-053388ab-444f-431d-8d61-a0fe00021ae4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.925286] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962800, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.930408] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 747.930408] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52653df8-d6cf-84d9-d78c-0b9f0b119ab2" [ 747.930408] env[68279]: _type = "Task" [ 747.930408] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.940140] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52653df8-d6cf-84d9-d78c-0b9f0b119ab2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.962276] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25b8d112-a78e-4de5-a53a-922cb17465f3 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.187s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.261664] env[68279]: DEBUG nova.compute.utils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 748.270456] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 748.270772] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.332823] env[68279]: DEBUG nova.policy [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c390fb2ba95249028d9cb30962259b12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4632448f387a49eda08bcdc55b94a84c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 748.425011] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962800, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507405} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.425306] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f/33f3fc4a-319b-4dd9-90b5-05ee5483ac7f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.425531] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.425789] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca8a62f2-634b-40b5-91b9-3ac0b51f3138 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.435763] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 748.435763] env[68279]: value = "task-2962801" [ 748.435763] env[68279]: _type = "Task" [ 748.435763] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.455797] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52653df8-d6cf-84d9-d78c-0b9f0b119ab2, 'name': SearchDatastore_Task, 'duration_secs': 0.01092} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.456612] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962801, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.459343] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-810020d1-4b0d-4c1e-81f0-85720a699a5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.464676] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 748.474124] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 748.474124] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52236c5a-b127-cba2-2c50-c2e4f11798c1" [ 748.474124] env[68279]: _type = "Task" [ 748.474124] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.484527] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52236c5a-b127-cba2-2c50-c2e4f11798c1, 'name': SearchDatastore_Task, 'duration_secs': 0.010631} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.484810] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.485087] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.485337] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c50b33e9-aab9-4759-804f-9eae7d11691e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.497534] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 748.497534] env[68279]: value = "task-2962802" [ 748.497534] env[68279]: _type = "Task" [ 748.497534] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.506744] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962802, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.528089] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Successfully updated port: 5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.575062] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Received event network-changed-de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.575325] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Refreshing instance network info cache due to event network-changed-de55f764-d554-4fcc-bc9d-3987f9c39bc3. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 748.575528] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Acquiring lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.575683] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Acquired lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.575852] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Refreshing network info cache for port de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.665429] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Successfully created port: 5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.765802] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.956724] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073392} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.956724] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.957230] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0978136e-dee6-4189-87be-09addfc806f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.988863] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f/33f3fc4a-319b-4dd9-90b5-05ee5483ac7f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.994027] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-124c80f2-e229-49f2-b490-95683ce06222 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.012855] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Successfully created port: 471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 749.027017] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485333} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.027120] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.027352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.028133] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 749.028133] env[68279]: value = "task-2962803" [ 749.028133] env[68279]: _type = "Task" [ 749.028133] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.028133] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-234ecdaf-8e46-47e8-a6e3-19346efb0d41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.032093] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.038099] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.038099] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquired lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.038099] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.047741] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "4e157792-f910-492c-ab29-dd3f86cb96a8" by 
"nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.048071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.048327] env[68279]: INFO nova.compute.manager [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Rebooting instance [ 749.049803] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962803, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.050178] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 749.050178] env[68279]: value = "task-2962804" [ 749.050178] env[68279]: _type = "Task" [ 749.050178] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.062413] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962804, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.478519] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9f876c-e273-43ac-9fc5-f690e57283b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.487725] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209a1459-25fa-45ef-b04b-0eda0009ace4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.493715] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updated VIF entry in instance network info cache for port de55f764-d554-4fcc-bc9d-3987f9c39bc3. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.494383] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updating instance_info_cache with network_info: [{"id": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "address": "fa:16:3e:a4:ff:b3", "network": {"id": "32f715fb-b537-4afb-925c-5e538945a3e3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2057410646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d74a8e71e1b40cc858089e4af0d4cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde55f764-d5", "ovs_interfaceid": "de55f764-d554-4fcc-bc9d-3987f9c39bc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.529270] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c95305-383c-4651-8354-fe7bc1b7d4b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.548819] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6803a9-37b0-4c3b-a961-888cf312458e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.555628] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962803, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.573272] env[68279]: DEBUG nova.compute.provider_tree [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.578985] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066005} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.579767] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.580608] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2a5168-554b-4956-8fdd-9ae2d7f831d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.584563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.584563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquired lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.584563] env[68279]: DEBUG nova.network.neutron [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.607287] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.608355] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.610730] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d33f47f-42f9-4b76-a829-73776ed4def1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.634336] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 749.634336] env[68279]: value = "task-2962805" [ 749.634336] env[68279]: _type = "Task" [ 749.634336] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.644282] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962805, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.784969] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.813765] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.814090] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.814270] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.814478] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.814644] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.814858] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.815102] env[68279]: DEBUG nova.virt.hardware [None 
req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.815282] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 749.815465] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.815629] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.815819] env[68279]: DEBUG nova.virt.hardware [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.816798] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcc9875-91b4-4396-9269-f6070f96167a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.825769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53790503-d1f7-4871-9039-b0d8d90dac15 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.891952] env[68279]: DEBUG nova.network.neutron [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Updating instance_info_cache with network_info: [{"id": "5859bf70-4474-4ff0-8e13-13176937abc5", "address": "fa:16:3e:57:32:c7", "network": {"id": "cbace267-d4a1-458d-93e2-6df8af9f6413", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-331525453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66f2613477b84e80a089465581fa6af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5859bf70-44", "ovs_interfaceid": "5859bf70-4474-4ff0-8e13-13176937abc5", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.998654] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Releasing lock "refresh_cache-e5565f0d-ed60-4ac8-bba1-ab46b337dd90" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.998967] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.999156] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing instance network info cache due to event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 749.999347] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Acquiring lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.051580] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962803, 'name': ReconfigVM_Task, 'duration_secs': 0.729501} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.051892] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f/33f3fc4a-319b-4dd9-90b5-05ee5483ac7f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.052534] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab45c9aa-cbb3-47b1-8bec-a33e981d3ee1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.059654] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 750.059654] env[68279]: value = "task-2962806" [ 750.059654] env[68279]: _type = "Task" [ 750.059654] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.067846] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962806, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.076849] env[68279]: DEBUG nova.scheduler.client.report [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 750.144615] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962805, 'name': ReconfigVM_Task, 'duration_secs': 0.29744} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.144906] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfigured VM instance instance-00000027 to attach disk [datastore2] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.145516] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7676c400-d99c-48ae-aba4-75143edcb30f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.152189] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 750.152189] env[68279]: value = "task-2962807" [ 750.152189] env[68279]: _type = "Task" [ 750.152189] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.170406] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962807, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.394911] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Releasing lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.396617] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Instance network_info: |[{"id": "5859bf70-4474-4ff0-8e13-13176937abc5", "address": "fa:16:3e:57:32:c7", "network": {"id": "cbace267-d4a1-458d-93e2-6df8af9f6413", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-331525453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66f2613477b84e80a089465581fa6af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5859bf70-44", "ovs_interfaceid": "5859bf70-4474-4ff0-8e13-13176937abc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.397268] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:32:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5859bf70-4474-4ff0-8e13-13176937abc5', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.409438] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Creating folder: Project (66f2613477b84e80a089465581fa6af5). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.410284] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0ab0978-cc3a-4105-b5d4-41b09b389bfe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.422408] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Created folder: Project (66f2613477b84e80a089465581fa6af5) in parent group-v594445. [ 750.422408] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Creating folder: Instances. Parent ref: group-v594564. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.422408] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1d053ef-49af-4243-af6f-ec24d282fb69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.431765] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Created folder: Instances in parent group-v594564. [ 750.432016] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.432221] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.432424] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71283907-e188-41b1-97d2-09413c75f69a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.455542] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.455542] env[68279]: value = "task-2962810" [ 750.455542] env[68279]: _type = "Task" [ 750.455542] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.463461] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962810, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.569859] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962806, 'name': Rename_Task, 'duration_secs': 0.459584} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.570605] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.570605] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc10b3f5-1af5-4b5c-8a3e-3be01da4f1ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.576573] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 750.576573] env[68279]: value = "task-2962811" [ 750.576573] env[68279]: _type = "Task" [ 750.576573] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.582220] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.827s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.582565] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 750.588696] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.901s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.588967] env[68279]: DEBUG nova.objects.instance [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lazy-loading 'resources' on Instance uuid f927c34a-f155-4a1f-8151-b16a3cb3e9a1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.590243] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962811, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.665086] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962807, 'name': Rename_Task, 'duration_secs': 0.148609} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.665086] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.665086] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcb762da-2c1f-4c2e-921c-e67360ce8edd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.671640] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 750.671640] env[68279]: value = "task-2962812" [ 750.671640] env[68279]: _type = "Task" [ 750.671640] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.680091] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.681179] env[68279]: DEBUG nova.network.neutron [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [{"id": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "address": "fa:16:3e:c6:15:27", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdccc2829-54", "ovs_interfaceid": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.703431] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Successfully updated port: 5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.907999] env[68279]: DEBUG nova.compute.manager 
[req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-vif-plugged-5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.907999] env[68279]: DEBUG oslo_concurrency.lockutils [req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] Acquiring lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.907999] env[68279]: DEBUG oslo_concurrency.lockutils [req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.907999] env[68279]: DEBUG oslo_concurrency.lockutils [req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.907999] env[68279]: DEBUG nova.compute.manager [req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] No waiting events found dispatching network-vif-plugged-5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 750.909939] env[68279]: WARNING nova.compute.manager [req-701c7e8b-47a8-461c-aa1d-f369f52d5560 req-2178241b-318c-4f26-a655-ef7402a8fd45 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received unexpected event network-vif-plugged-5d6278f5-880f-41ed-ad7e-f1211fdf3abf for instance with vm_state building and task_state spawning. [ 750.970538] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962810, 'name': CreateVM_Task, 'duration_secs': 0.329765} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.970730] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.971498] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.971671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.972036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.972319] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9240efa4-361c-4ae3-bc13-f8915a253278 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.977599] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 750.977599] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5243009a-ef85-72a9-2693-13ab8439a69d" [ 750.977599] env[68279]: _type = "Task" [ 750.977599] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.986616] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5243009a-ef85-72a9-2693-13ab8439a69d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.035800] env[68279]: DEBUG nova.compute.manager [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Received event network-changed-5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.036143] env[68279]: DEBUG nova.compute.manager [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Refreshing instance network info cache due to event network-changed-5859bf70-4474-4ff0-8e13-13176937abc5. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.036501] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] Acquiring lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.036760] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] Acquired lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.037109] env[68279]: DEBUG nova.network.neutron [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Refreshing network info cache for port 5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.091504] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.091709] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.091929] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962811, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.095526] env[68279]: DEBUG nova.compute.utils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 751.097351] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 751.097687] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 751.154293] env[68279]: DEBUG nova.policy [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6dcff6c11546f9b0907917a2463755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbad607de614a809c51668c2ac0d012', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 751.184944] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Releasing lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.186586] env[68279]: DEBUG oslo_vmware.api [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962812, 'name': PowerOnVM_Task, 'duration_secs': 0.459429} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.187209] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Acquired lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.188027] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing network info cache for port dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.189133] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 751.189374] env[68279]: INFO nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Took 6.93 seconds to spawn the instance on the hypervisor. 
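The "Acquiring lock ... / Lock ... acquired by ... :: waited ... / Releasing lock ..." DEBUG lines above (for names such as "compute_resources" and "refresh_cache-<uuid>") are emitted by oslo.concurrency's lockutils wrappers. The following is a minimal illustrative sketch of that pattern only, not Nova's actual code; the function bodies and the claim/refresh helpers are made up for the example, while the lock names mirror the ones in the log.

# Illustrative sketch -- not Nova's implementation. Shows the oslo.concurrency
# locking pattern that produces the "Acquiring lock ..." / "Lock ... acquired by ..."
# / "Lock ... released by ..." DEBUG lines seen in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held; lockutils logs the
    # acquire/release and the wait/held durations on our behalf.
    print('claiming resources for %s' % instance_uuid)


def refresh_network_cache(instance_uuid):
    # Same idea with the context-manager form and a per-instance lock name,
    # mirroring the "refresh_cache-<uuid>" locks in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('refreshing network info cache for %s' % instance_uuid)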
[ 751.189669] env[68279]: DEBUG nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.193111] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c821104-84f1-4874-851b-f9590e42f417 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.452026] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Successfully created port: f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.488163] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5243009a-ef85-72a9-2693-13ab8439a69d, 'name': SearchDatastore_Task, 'duration_secs': 0.010691} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.490732] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.490966] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.491225] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.491374] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.491553] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.491981] env[68279]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-172df9c6-08f1-40b9-b94f-09b2bbb06153 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.504410] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.504410] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.506333] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-254e4346-e5b4-45b9-bd4a-9e393cd0be5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.510346] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 751.510346] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52603907-5cc9-c769-2b3c-4b8f8e31a4b2" [ 751.510346] env[68279]: _type = "Task" [ 751.510346] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.518833] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52603907-5cc9-c769-2b3c-4b8f8e31a4b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.588469] env[68279]: DEBUG oslo_vmware.api [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962811, 'name': PowerOnVM_Task, 'duration_secs': 0.656333} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.588768] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 751.588974] env[68279]: INFO nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Took 10.10 seconds to spawn the instance on the hypervisor. 
[ 751.589157] env[68279]: DEBUG nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.589970] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f61a6e-ce5e-4cae-8c1e-80a984bcebb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.603586] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 751.677327] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0f5974-0c40-46b4-a585-e1ca5461e46a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.685371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33b771b-c01c-4c9c-a1d6-9eeab785c472 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.692464] env[68279]: DEBUG nova.compute.manager [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.693279] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a549e5-c168-4163-a3e2-81b7f9da1f66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.730269] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72894535-e50f-4f48-99dd-634c6ccb2037 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.738541] env[68279]: INFO nova.compute.manager [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Took 42.41 seconds to build instance. 
[ 751.745213] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e630e9-fce4-4a7a-a3c5-8253d3301b63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.759633] env[68279]: DEBUG nova.compute.provider_tree [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.871013] env[68279]: DEBUG nova.network.neutron [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Updated VIF entry in instance network info cache for port 5859bf70-4474-4ff0-8e13-13176937abc5. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.871469] env[68279]: DEBUG nova.network.neutron [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Updating instance_info_cache with network_info: [{"id": "5859bf70-4474-4ff0-8e13-13176937abc5", "address": "fa:16:3e:57:32:c7", "network": {"id": "cbace267-d4a1-458d-93e2-6df8af9f6413", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-331525453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66f2613477b84e80a089465581fa6af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5859bf70-44", "ovs_interfaceid": "5859bf70-4474-4ff0-8e13-13176937abc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.929079] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updated VIF entry in instance network info cache for port dccc2829-5441-46f6-8b0a-fbfa005d0fa5. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.929079] env[68279]: DEBUG nova.network.neutron [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [{"id": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "address": "fa:16:3e:c6:15:27", "network": {"id": "4a3c1f4b-4caf-4f32-a685-8a80bbe32055", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1678849819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7bf4e6f720045e1854859d2966a887b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdccc2829-54", "ovs_interfaceid": "dccc2829-5441-46f6-8b0a-fbfa005d0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.022207] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52603907-5cc9-c769-2b3c-4b8f8e31a4b2, 'name': SearchDatastore_Task, 'duration_secs': 0.032717} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.022207] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1584f5-183e-47c1-b889-1a23f8a69326 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.027175] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 752.027175] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524e6260-38a1-eefd-ca70-f2193cc2a900" [ 752.027175] env[68279]: _type = "Task" [ 752.027175] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.034539] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524e6260-38a1-eefd-ca70-f2193cc2a900, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.107506] env[68279]: DEBUG nova.compute.manager [None req-c9ac296f-e28a-42d8-937f-728786f4557b tempest-ServerDiagnosticsTest-242039183 tempest-ServerDiagnosticsTest-242039183-project-admin] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.112945] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297894b0-a8da-417b-842c-3eb3105c1f5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.116113] env[68279]: INFO nova.compute.manager [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Took 44.53 seconds to build instance. [ 752.121501] env[68279]: INFO nova.compute.manager [None req-c9ac296f-e28a-42d8-937f-728786f4557b tempest-ServerDiagnosticsTest-242039183 tempest-ServerDiagnosticsTest-242039183-project-admin] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Retrieving diagnostics [ 752.122429] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14ca97a-2828-4fae-a2bc-1d5db09fd366 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.243836] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a02d5596-ab44-421e-804c-2539f6733c7c tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.111s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.265032] env[68279]: DEBUG nova.scheduler.client.report [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 752.376244] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b9b34f1-b433-49cd-b42f-43de9861645a req-736cd775-918e-4a89-92e4-52f8af9c5a8f service nova] Releasing lock "refresh_cache-05b94aa5-3efc-4790-9d98-c2658b8e8b4b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.432192] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Releasing lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.432442] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 
req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Received event network-vif-plugged-5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.432641] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Acquiring lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.432873] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.432997] env[68279]: DEBUG oslo_concurrency.lockutils [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.433175] env[68279]: DEBUG nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] No waiting events found dispatching network-vif-plugged-5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 752.433338] env[68279]: WARNING nova.compute.manager [req-4aa50b56-ab0d-4539-a7a8-1efdf772a6f8 req-b47d9b74-1b1e-4414-bc3e-4581aabb4c7b service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Received unexpected event network-vif-plugged-5859bf70-4474-4ff0-8e13-13176937abc5 for instance with vm_state building and task_state spawning. [ 752.538035] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524e6260-38a1-eefd-ca70-f2193cc2a900, 'name': SearchDatastore_Task, 'duration_secs': 0.023946} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.538299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.538697] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 05b94aa5-3efc-4790-9d98-c2658b8e8b4b/05b94aa5-3efc-4790-9d98-c2658b8e8b4b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.539013] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a4fcd35-cca1-4b37-9ae2-6239545c655c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.545758] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 752.545758] env[68279]: value = "task-2962813" [ 752.545758] env[68279]: _type = "Task" [ 752.545758] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.553247] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962813, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.618781] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 752.621141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-013082ed-dece-4e11-94fe-77cbde12ce0e tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.587s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.648389] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 752.648776] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.648871] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.649059] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.649440] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.649440] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 752.649595] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 752.649818] 
env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 752.650221] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 752.650417] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 752.650672] env[68279]: DEBUG nova.virt.hardware [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 752.652031] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bda508c-d67b-475b-a0f0-740d8f35b45c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.661247] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f03530-17e8-471f-9ed4-3f09019aecb5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.749926] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 752.753842] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c860cd09-6a49-4041-be87-26bbdb8d7d35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.765947] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Doing hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 752.765947] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-68c70201-bd96-4277-8e27-d71c4aaecb88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.770031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.774095] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.088s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.779069] env[68279]: INFO nova.compute.claims [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.783743] env[68279]: DEBUG oslo_vmware.api [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 752.783743] env[68279]: value = "task-2962814" [ 752.783743] env[68279]: _type = "Task" [ 752.783743] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.798483] env[68279]: DEBUG oslo_vmware.api [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962814, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.826033] env[68279]: INFO nova.scheduler.client.report [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Deleted allocations for instance f927c34a-f155-4a1f-8151-b16a3cb3e9a1 [ 753.031697] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Successfully updated port: f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.058126] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962813, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.061242] env[68279]: DEBUG nova.compute.manager [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-changed-5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 753.061538] env[68279]: DEBUG nova.compute.manager [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Refreshing instance network info cache due to event network-changed-5d6278f5-880f-41ed-ad7e-f1211fdf3abf. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 753.061876] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Acquiring lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.062653] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Acquired lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.062957] env[68279]: DEBUG nova.network.neutron [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Refreshing network info cache for port 5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.098163] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Successfully updated port: 471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.124538] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.274088] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.277903] env[68279]: DEBUG nova.compute.manager [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-vif-plugged-471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 753.278124] env[68279]: DEBUG oslo_concurrency.lockutils [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] Acquiring lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.278345] env[68279]: DEBUG oslo_concurrency.lockutils [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.278566] env[68279]: DEBUG oslo_concurrency.lockutils [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.278814] env[68279]: DEBUG nova.compute.manager [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] No waiting events found dispatching network-vif-plugged-471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.278947] env[68279]: WARNING nova.compute.manager [req-0519e66c-cb73-4373-a79d-5b11067d4ac2 req-f5519875-a439-4f9c-88e2-f7876f5ca027 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received unexpected event network-vif-plugged-471c04b2-cefa-42ee-b72b-a233b1b330c2 for instance with vm_state building and task_state spawning. [ 753.294120] env[68279]: DEBUG oslo_vmware.api [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962814, 'name': ResetVM_Task, 'duration_secs': 0.177305} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.294373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Did hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 753.294564] env[68279]: DEBUG nova.compute.manager [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.295326] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8530c6ef-0396-4315-9671-4122194b1d12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.334987] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8779f065-d240-434e-a888-45cfa4358409 tempest-VolumesAdminNegativeTest-1271088836 tempest-VolumesAdminNegativeTest-1271088836-project-member] Lock "f927c34a-f155-4a1f-8151-b16a3cb3e9a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.560s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.389518] env[68279]: INFO nova.compute.manager [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Rebuilding instance [ 753.432474] env[68279]: DEBUG nova.compute.manager [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.434176] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db64133b-49cd-4823-8118-675e6976ead7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.517990] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.518303] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.518547] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock 
"33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.518895] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.519110] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.521264] env[68279]: INFO nova.compute.manager [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Terminating instance [ 753.534892] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.535041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.535157] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.556634] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530961} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.556973] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 05b94aa5-3efc-4790-9d98-c2658b8e8b4b/05b94aa5-3efc-4790-9d98-c2658b8e8b4b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.557200] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.557386] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e04d9fb6-cb5f-4594-a401-ea1cc2f93766 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.564344] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 753.564344] env[68279]: value = "task-2962815" [ 753.564344] env[68279]: _type = "Task" [ 753.564344] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.573975] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962815, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.600284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.602810] env[68279]: DEBUG nova.network.neutron [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.646270] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.745017] env[68279]: DEBUG nova.network.neutron [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.807944] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5be0a1ce-bd2b-4c43-a3f3-fe19357df7d5 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.760s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.025941] env[68279]: DEBUG nova.compute.manager [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.026412] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.028579] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d866b569-ff3e-4a78-8736-8d9bce0bc18a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.039868] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.042557] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0705c31-e514-490a-b40e-4bf81cfdd1df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.051117] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 754.051117] env[68279]: value = "task-2962816" [ 754.051117] env[68279]: _type = "Task" [ 754.051117] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.059865] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.072758] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962815, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064821} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.073794] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.073794] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ecbe74-c210-4414-b02d-e4c4984e0eb2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.099280] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 05b94aa5-3efc-4790-9d98-c2658b8e8b4b/05b94aa5-3efc-4790-9d98-c2658b8e8b4b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.102653] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30d3b16b-bf3b-4b45-941c-7e54ccae7838 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.118598] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.126267] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 754.126267] env[68279]: value = "task-2962817" [ 754.126267] env[68279]: _type = "Task" [ 754.126267] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.138720] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962817, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.247735] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Releasing lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.248027] env[68279]: DEBUG nova.compute.manager [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Received event network-vif-plugged-f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.248225] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Acquiring lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.248434] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.248663] env[68279]: DEBUG oslo_concurrency.lockutils [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.248844] env[68279]: DEBUG nova.compute.manager [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] No waiting events found dispatching network-vif-plugged-f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.249019] env[68279]: WARNING nova.compute.manager [req-d395ec1a-fd0a-4baa-ae1b-77efa41f8c47 req-d3a71460-fd67-4927-81f7-82b627242676 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Received unexpected event network-vif-plugged-f28928fb-b9ce-40c3-87b6-80cb1360cef6 for instance with vm_state building and task_state spawning. 
[ 754.254569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.254569] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.439089] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d56ad6-2eff-43d5-b583-ec9bdc9aaaa1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.446903] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb6b7ff-f202-4fa4-9990-d34e443a5c36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.454878] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.455485] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92741403-51af-4297-872b-8ecffbac197c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.461037] env[68279]: DEBUG nova.network.neutron [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Updating instance_info_cache with network_info: [{"id": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "address": "fa:16:3e:4e:3c:b4", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28928fb-b9", "ovs_interfaceid": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.495283] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-328583ff-327b-4722-98db-312175e7f620 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.497930] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 754.497930] env[68279]: value = "task-2962818" [ 754.497930] env[68279]: _type = "Task" [ 754.497930] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.505320] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96a72af-66e7-466c-8cff-dd9294068202 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.513518] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962818, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.524051] env[68279]: DEBUG nova.compute.provider_tree [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.562409] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962816, 'name': PowerOffVM_Task, 'duration_secs': 0.214943} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.562688] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 754.562921] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 754.563247] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03d5d2c4-b4c9-48a1-bcaf-d070f2141f8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.626694] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 754.626967] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 754.628042] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Deleting the datastore file [datastore2] 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.631605] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8bf68b8-e70f-40a2-b2ca-c3eb438f74b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.639450] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.641257] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for the task: (returnval){ [ 754.641257] env[68279]: value = "task-2962820" [ 754.641257] env[68279]: _type = "Task" [ 754.641257] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.651322] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.804366] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.997638] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.997973] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance network_info: |[{"id": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "address": "fa:16:3e:4e:3c:b4", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28928fb-b9", "ovs_interfaceid": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 754.998452] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:3c:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f28928fb-b9ce-40c3-87b6-80cb1360cef6', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.007239] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.009138] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.017981] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9664498-d628-44d2-9ff8-64303abfe56f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.041273] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "4e157792-f910-492c-ab29-dd3f86cb96a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.041651] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.041718] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.041913] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.042072] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.046137] env[68279]: DEBUG nova.scheduler.client.report [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.048088] env[68279]: INFO nova.compute.manager [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Terminating instance [ 755.062402] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.062402] env[68279]: value = "task-2962821" [ 755.062402] env[68279]: _type = "Task" [ 755.062402] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.062672] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962818, 'name': PowerOffVM_Task, 'duration_secs': 0.199958} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.063221] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.063448] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.067848] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41478ffd-10da-4d02-8c13-b29135925577 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.075392] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.078488] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d103bd54-a32c-4e71-a6d9-d9005a40a792 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.083016] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962821, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.106728] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.106937] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.107123] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Deleting the datastore file [datastore2] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.107387] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-075b8424-c75b-4b5a-a831-6dc037639da4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.116024] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 755.116024] env[68279]: value = "task-2962823" [ 755.116024] env[68279]: _type = "Task" [ 755.116024] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.125093] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962823, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.138620] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962817, 'name': ReconfigVM_Task, 'duration_secs': 0.591503} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.139343] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 05b94aa5-3efc-4790-9d98-c2658b8e8b4b/05b94aa5-3efc-4790-9d98-c2658b8e8b4b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.140051] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4be67f2-b709-448e-bba7-1b9dcbde7fb1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.149941] env[68279]: DEBUG oslo_vmware.api [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Task: {'id': task-2962820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181026} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.151410] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.151480] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.151599] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.151758] env[68279]: INFO nova.compute.manager [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 755.151996] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.152318] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 755.152318] env[68279]: value = "task-2962824" [ 755.152318] env[68279]: _type = "Task" [ 755.152318] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.152477] env[68279]: DEBUG nova.compute.manager [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.152585] env[68279]: DEBUG nova.network.neutron [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.162458] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962824, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.282548] env[68279]: DEBUG nova.compute.manager [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Received event network-changed-f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.282790] env[68279]: DEBUG nova.compute.manager [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Refreshing instance network info cache due to event network-changed-f28928fb-b9ce-40c3-87b6-80cb1360cef6. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 755.283110] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] Acquiring lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.283527] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] Acquired lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.283651] env[68279]: DEBUG nova.network.neutron [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Refreshing network info cache for port f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.385286] env[68279]: DEBUG nova.network.neutron [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updating instance_info_cache with network_info: [{"id": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "address": "fa:16:3e:d5:df:f6", "network": {"id": "9f03824c-75b2-46fc-ae14-b8d2f706d84e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-443770033", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6278f5-88", "ovs_interfaceid": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "address": "fa:16:3e:d0:bb:e3", "network": {"id": "bffe4d13-71d4-48c1-81df-90a34224bbe2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2115023872", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471c04b2-ce", "ovs_interfaceid": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.443298] env[68279]: DEBUG nova.compute.manager [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-changed-471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.443491] env[68279]: DEBUG nova.compute.manager [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Refreshing instance network info cache due to event network-changed-471c04b2-cefa-42ee-b72b-a233b1b330c2. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 755.443694] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Acquiring lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.549460] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.550068] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 755.553149] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.549s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.553390] env[68279]: DEBUG nova.objects.instance [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lazy-loading 'resources' on Instance uuid 61392426-52b8-437e-ab3d-122d9335cd36 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.559147] env[68279]: DEBUG nova.compute.manager [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.559147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.559394] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a6770f-fbe2-4526-87ad-bbf64719d025 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.569760] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.570796] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-870accd1-d0cd-42b7-ae44-ffa218bec776 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.577393] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962821, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.582127] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 755.582127] env[68279]: value = "task-2962825" [ 755.582127] env[68279]: _type = "Task" [ 755.582127] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.592279] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962825, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.625768] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110628} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.625936] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.626030] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.626257] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.665389] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962824, 'name': Rename_Task, 'duration_secs': 0.171143} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.665680] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.666653] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06d9d53b-2333-4579-9dc6-57a9a612f20d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.672863] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 755.672863] env[68279]: value = "task-2962826" [ 755.672863] env[68279]: _type = "Task" [ 755.672863] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.681777] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962826, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.888213] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.888616] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance network_info: |[{"id": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "address": "fa:16:3e:d5:df:f6", "network": {"id": "9f03824c-75b2-46fc-ae14-b8d2f706d84e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-443770033", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6278f5-88", "ovs_interfaceid": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "address": "fa:16:3e:d0:bb:e3", "network": {"id": "bffe4d13-71d4-48c1-81df-90a34224bbe2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2115023872", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471c04b2-ce", "ovs_interfaceid": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.891325] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Acquired lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.891523] env[68279]: DEBUG nova.network.neutron 
[req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Refreshing network info cache for port 471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.894555] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:df:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d6278f5-880f-41ed-ad7e-f1211fdf3abf', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:bb:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '471c04b2-cefa-42ee-b72b-a233b1b330c2', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.904147] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.905136] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.905825] env[68279]: DEBUG nova.network.neutron [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.906887] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32f45126-ea1e-4f39-927e-877c2367bf87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.925141] env[68279]: INFO nova.compute.manager [-] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Took 0.77 seconds to deallocate network for instance. [ 755.934416] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.934416] env[68279]: value = "task-2962827" [ 755.934416] env[68279]: _type = "Task" [ 755.934416] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.944402] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962827, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.060495] env[68279]: DEBUG nova.compute.utils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 756.064756] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.064934] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.083072] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962821, 'name': CreateVM_Task, 'duration_secs': 0.57492} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.083072] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.086777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.087035] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.087428] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.088614] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7d79c7d-add4-41f6-ab35-f869a6533063 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.099176] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962825, 'name': PowerOffVM_Task, 'duration_secs': 0.182093} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.101755] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.101966] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.102336] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 756.102336] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e44939-f6ff-6606-99cc-690737517011" [ 756.102336] env[68279]: _type = "Task" [ 756.102336] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.103047] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ded09683-010e-437f-90df-e7c15ea8ead9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.116838] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e44939-f6ff-6606-99cc-690737517011, 'name': SearchDatastore_Task, 'duration_secs': 0.011328} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.117163] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.118117] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.118117] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.118117] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.118117] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.118444] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdc94f53-d99d-4e5d-921a-287a855cfacd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.122443] env[68279]: DEBUG nova.policy [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5cd7e44689a40d993e5da3165332fd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '295e4a23df6e4d029636d514484434e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 756.133935] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.134457] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 
tempest-ImagesTestJSON-849923966-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.135227] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1485914c-df3c-4f35-9904-ee258e2b591e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.144219] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 756.144219] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52df4aac-6cbc-00bb-6a16-be9c8eebc523" [ 756.144219] env[68279]: _type = "Task" [ 756.144219] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.152214] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52df4aac-6cbc-00bb-6a16-be9c8eebc523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.153054] env[68279]: DEBUG nova.network.neutron [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Updated VIF entry in instance network info cache for port f28928fb-b9ce-40c3-87b6-80cb1360cef6. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.153386] env[68279]: DEBUG nova.network.neutron [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Updating instance_info_cache with network_info: [{"id": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "address": "fa:16:3e:4e:3c:b4", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28928fb-b9", "ovs_interfaceid": "f28928fb-b9ce-40c3-87b6-80cb1360cef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.182844] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] 
Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.183132] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.183381] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleting the datastore file [datastore2] 4e157792-f910-492c-ab29-dd3f86cb96a8 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.184164] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ebf603b-e484-4fcc-abc1-fdcee5474385 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.189672] env[68279]: DEBUG oslo_vmware.api [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962826, 'name': PowerOnVM_Task, 'duration_secs': 0.468318} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.190368] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.190793] env[68279]: INFO nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Took 9.20 seconds to spawn the instance on the hypervisor. [ 756.191082] env[68279]: DEBUG nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.191967] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf5124e-7094-4580-aeff-e1c0038b2afa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.196838] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 756.196838] env[68279]: value = "task-2962829" [ 756.196838] env[68279]: _type = "Task" [ 756.196838] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.214777] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962829, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.436690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.453172] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962827, 'name': CreateVM_Task, 'duration_secs': 0.42037} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.453172] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.453172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.453172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.453172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.453172] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae66679-f2b9-4ec2-b230-a11c4c0b7ad8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.466020] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 756.466020] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527ff1dc-dac7-1e03-ec45-255040acc156" [ 756.466020] env[68279]: _type = "Task" [ 756.466020] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.473726] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527ff1dc-dac7-1e03-ec45-255040acc156, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.570233] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.625399] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Successfully created port: ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.656976] env[68279]: DEBUG oslo_concurrency.lockutils [req-d4a85298-5053-46e6-b736-bc33a3b26608 req-782d2ec1-6972-4f58-9848-0b053c91eabb service nova] Releasing lock "refresh_cache-01a624d3-782d-44cf-8a4e-05a85ac91c64" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.664412] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52df4aac-6cbc-00bb-6a16-be9c8eebc523, 'name': SearchDatastore_Task, 'duration_secs': 0.013841} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.671495] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b42ffbf-89f2-459e-be69-204e04e0c987 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.678646] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 756.678646] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52339369-dda9-22c8-3d9f-47d713e39830" [ 756.678646] env[68279]: _type = "Task" [ 756.678646] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.684579] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 756.684791] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.684947] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 756.685168] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.685336] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 756.685502] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 756.685711] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 756.685871] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 756.686061] env[68279]: DEBUG nova.virt.hardware [None 
req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 756.686233] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 756.686411] env[68279]: DEBUG nova.virt.hardware [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 756.687252] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa124405-1be2-41dd-99c6-afc4fd8cfddf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.698153] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52339369-dda9-22c8-3d9f-47d713e39830, 'name': SearchDatastore_Task, 'duration_secs': 0.015005} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.700510] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.700781] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 01a624d3-782d-44cf-8a4e-05a85ac91c64/01a624d3-782d-44cf-8a4e-05a85ac91c64.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.701166] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8e74968-aacd-4943-bb61-8b4d56d33ef4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.704044] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93959c07-f305-46a7-b15b-1162bd7b24ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.736593] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 756.736593] env[68279]: value = "task-2962830" [ 756.736593] env[68279]: _type = "Task" [ 756.736593] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.737046] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.745019] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.745642] env[68279]: INFO nova.compute.manager [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Took 40.95 seconds to build instance. [ 756.754353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.754725] env[68279]: DEBUG oslo_vmware.api [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395246} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.758947] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91f96fb7-be98-4fd6-9194-a19ff14cab35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.772031] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.772031] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.772031] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.772031] env[68279]: INFO nova.compute.manager [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 756.772031] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.773268] env[68279]: DEBUG nova.compute.manager [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.773373] env[68279]: DEBUG nova.network.neutron [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.775738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc915d6b-6e93-4693-99fa-8a0323f31995 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.784096] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962830, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.786815] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.786815] env[68279]: value = "task-2962831" [ 756.786815] env[68279]: _type = "Task" [ 756.786815] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.788089] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc132a01-a7ef-4f22-b4c5-bb2618c6c88c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.799998] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962831, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.826342] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a47e88a-f6bc-490d-aeb1-a0ca23b76fa1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.833807] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f8149e-18af-4480-91cd-4f9ea4e566f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.847420] env[68279]: DEBUG nova.compute.provider_tree [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.849561] env[68279]: DEBUG nova.network.neutron [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updated VIF entry in instance network info cache for port 471c04b2-cefa-42ee-b72b-a233b1b330c2. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.849903] env[68279]: DEBUG nova.network.neutron [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updating instance_info_cache with network_info: [{"id": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "address": "fa:16:3e:d5:df:f6", "network": {"id": "9f03824c-75b2-46fc-ae14-b8d2f706d84e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-443770033", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6278f5-88", "ovs_interfaceid": "5d6278f5-880f-41ed-ad7e-f1211fdf3abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "address": "fa:16:3e:d0:bb:e3", "network": {"id": "bffe4d13-71d4-48c1-81df-90a34224bbe2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2115023872", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471c04b2-ce", "ovs_interfaceid": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.975952] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527ff1dc-dac7-1e03-ec45-255040acc156, 'name': SearchDatastore_Task, 'duration_secs': 0.010032} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.975952] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.975952] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.976311] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.976408] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.976649] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.976861] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ff86dcc-43b3-4db5-a42f-89ce89f616b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.985735] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.985991] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.986791] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be4a274e-9831-43e8-bde4-849d32b9b547 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.992213] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 756.992213] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f053-74f2-354e-12ea-1c059f541a60" [ 756.992213] env[68279]: _type = "Task" [ 756.992213] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.000521] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f053-74f2-354e-12ea-1c059f541a60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.256579] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac59c992-3504-4f7d-973c-855248d4b0b0 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 63.361s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.266711] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962830, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.302517] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962831, 'name': CreateVM_Task, 'duration_secs': 0.289636} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.302770] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.303270] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.303506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.303896] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.304232] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11d8b9e5-0bf9-4c98-99fa-af860f63896f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.312620] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 757.312620] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b932-c5a0-ca9b-ddea-8f57468e5244" [ 757.312620] env[68279]: _type = "Task" [ 757.312620] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.321932] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b932-c5a0-ca9b-ddea-8f57468e5244, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.338342] env[68279]: DEBUG nova.compute.manager [req-940236fb-6e95-45bc-8d73-64915b7a5cfb req-bcf4efa3-54c7-421e-b9f1-1bcdc99c067d service nova] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Received event network-vif-deleted-f572d86a-c060-4562-9aee-88a99349e45a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.338566] env[68279]: DEBUG nova.compute.manager [req-940236fb-6e95-45bc-8d73-64915b7a5cfb req-bcf4efa3-54c7-421e-b9f1-1bcdc99c067d service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received event network-vif-deleted-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.338747] env[68279]: INFO nova.compute.manager [req-940236fb-6e95-45bc-8d73-64915b7a5cfb req-bcf4efa3-54c7-421e-b9f1-1bcdc99c067d service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Neutron deleted interface dccc2829-5441-46f6-8b0a-fbfa005d0fa5; detaching it from the instance and deleting it from the info cache [ 757.338916] env[68279]: DEBUG nova.network.neutron [req-940236fb-6e95-45bc-8d73-64915b7a5cfb req-bcf4efa3-54c7-421e-b9f1-1bcdc99c067d service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.354115] env[68279]: DEBUG nova.scheduler.client.report [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.356612] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Releasing lock "refresh_cache-d452e3d2-1590-4352-8406-31d85b2921f4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.356872] env[68279]: DEBUG nova.compute.manager [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Received event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.357060] env[68279]: DEBUG nova.compute.manager [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing instance network info cache due to event network-changed-dccc2829-5441-46f6-8b0a-fbfa005d0fa5. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 757.357267] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Acquiring lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.357409] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Acquired lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.357567] env[68279]: DEBUG nova.network.neutron [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Refreshing network info cache for port dccc2829-5441-46f6-8b0a-fbfa005d0fa5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.505301] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f053-74f2-354e-12ea-1c059f541a60, 'name': SearchDatastore_Task, 'duration_secs': 0.03792} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.506165] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-473333ea-208b-4f62-9ced-251a240ab044 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.512487] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 757.512487] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b79c7f-ff4a-c99f-fd53-448d7ff1919a" [ 757.512487] env[68279]: _type = "Task" [ 757.512487] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.520976] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b79c7f-ff4a-c99f-fd53-448d7ff1919a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.582115] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.611008] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.611271] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.611452] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.611866] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.611866] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.611866] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.612113] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.612288] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.612456] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 
tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.612617] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.612789] env[68279]: DEBUG nova.virt.hardware [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.613646] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50979672-ec09-44e5-a355-5da5e4d4d4fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.623054] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fccba5-2f9f-49f3-b23e-451bbcfc1e6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.739433] env[68279]: DEBUG nova.network.neutron [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.765399] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.767896] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647863} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.768585] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 01a624d3-782d-44cf-8a4e-05a85ac91c64/01a624d3-782d-44cf-8a4e-05a85ac91c64.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.768810] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.769075] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf2795d3-97ca-4f3c-a0e3-2323d331ee4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.777014] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 757.777014] env[68279]: value = "task-2962832" [ 757.777014] env[68279]: _type = "Task" [ 757.777014] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.785369] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.822181] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b932-c5a0-ca9b-ddea-8f57468e5244, 'name': SearchDatastore_Task, 'duration_secs': 0.022482} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.822505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.822798] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.824118] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.824118] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.824118] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.824118] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7d37148-c390-4044-b569-c49d50ad1f29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.833250] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.833377] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.834091] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52795047-1894-43a1-ab2e-c61aaf0c2d05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.840100] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 757.840100] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7d396-1da2-33fd-970b-aa92a557eca7" [ 757.840100] env[68279]: _type = "Task" [ 757.840100] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.844984] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0f2b89d-94a3-4444-86d0-fc0e71b16eab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.851317] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7d396-1da2-33fd-970b-aa92a557eca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.856831] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81970dd-1a28-4376-b4b6-f557899a7208 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.867652] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.314s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.871580] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.660s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.873041] env[68279]: INFO nova.compute.claims [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.906268] env[68279]: DEBUG nova.compute.manager [req-940236fb-6e95-45bc-8d73-64915b7a5cfb req-bcf4efa3-54c7-421e-b9f1-1bcdc99c067d service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Detach interface failed, port_id=dccc2829-5441-46f6-8b0a-fbfa005d0fa5, reason: Instance 4e157792-f910-492c-ab29-dd3f86cb96a8 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 757.908007] env[68279]: INFO nova.scheduler.client.report [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Deleted allocations for instance 61392426-52b8-437e-ab3d-122d9335cd36 [ 757.913236] env[68279]: DEBUG nova.network.neutron [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.013805] env[68279]: DEBUG nova.network.neutron [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.024819] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b79c7f-ff4a-c99f-fd53-448d7ff1919a, 'name': SearchDatastore_Task, 'duration_secs': 0.029157} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.025664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.025989] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d452e3d2-1590-4352-8406-31d85b2921f4/d452e3d2-1590-4352-8406-31d85b2921f4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.026300] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24f254ea-3132-4cb0-a5f4-c010fc3dcedc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.034230] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 758.034230] env[68279]: value = "task-2962833" [ 758.034230] env[68279]: _type = "Task" [ 758.034230] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.042459] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962833, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.242774] env[68279]: INFO nova.compute.manager [-] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Took 1.47 seconds to deallocate network for instance. [ 758.291093] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.292377] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.352056] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7d396-1da2-33fd-970b-aa92a557eca7, 'name': SearchDatastore_Task, 'duration_secs': 0.026724} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.353667] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e76803cb-c902-496e-ab24-59459793808f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.359892] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 758.359892] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5220a852-8ae5-ad3e-7e2f-39cbfa79683b" [ 758.359892] env[68279]: _type = "Task" [ 758.359892] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.369567] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5220a852-8ae5-ad3e-7e2f-39cbfa79683b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.388300] env[68279]: DEBUG nova.compute.manager [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Received event network-vif-plugged-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.388300] env[68279]: DEBUG oslo_concurrency.lockutils [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] Acquiring lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.388491] env[68279]: DEBUG oslo_concurrency.lockutils [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.388670] env[68279]: DEBUG oslo_concurrency.lockutils [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.388851] env[68279]: DEBUG nova.compute.manager [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] No waiting events found dispatching network-vif-plugged-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.389020] env[68279]: WARNING nova.compute.manager [req-496987e7-b5a4-480d-9b9f-c168576da86d req-da0acf22-7762-4cbe-9f6d-43b150b3ce5e service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Received unexpected event network-vif-plugged-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 for instance with vm_state building and task_state spawning. 
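The records above show oslo.vmware's `wait_for_task`/`_poll_task` pair repeatedly polling CopyVirtualDisk_Task and SearchDatastore_Task ("progress is 0%") until the task completes. A minimal sketch of that polling pattern using oslo.service's looping call, which is the mechanism these DEBUG lines come from; `fetch_task_info` here is a hypothetical stand-in for the vCenter property query that oslo.vmware actually issues, not its real API.

```python
# Illustrative sketch of the task-polling pattern visible in the log above.
# This is not the oslo.vmware implementation; fetch_task_info() is a
# hypothetical stand-in for the PropertyCollector query on the Task object.
from oslo_service import loopingcall


def wait_for_vcenter_task(fetch_task_info, poll_interval=0.5):
    """Poll a vCenter task until it reports success or error."""

    def _poll():
        info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            # LoopingCallDone stops the loop; its value is what
            # start(...).wait() returns to the caller below.
            raise loopingcall.LoopingCallDone(info.get('result'))
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('error'))
        # Any other state: keep looping. The intermediate "progress is N%"
        # DEBUG records above are emitted from polls like this one.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()
```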
[ 758.420324] env[68279]: DEBUG oslo_concurrency.lockutils [None req-70002615-7dfc-4fae-9424-eaf4dcbca3c7 tempest-ServerRescueTestJSONUnderV235-318879096 tempest-ServerRescueTestJSONUnderV235-318879096-project-member] Lock "61392426-52b8-437e-ab3d-122d9335cd36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.971s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.426940] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Successfully updated port: ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 758.519936] env[68279]: DEBUG oslo_concurrency.lockutils [req-9a2c0dd7-d295-40da-af2b-7fe36b4c34fc req-4425ec1f-dddd-4ae9-a6ee-8dcd1c38fe96 service nova] Releasing lock "refresh_cache-4e157792-f910-492c-ab29-dd3f86cb96a8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.544940] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962833, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.749361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.794755] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.772143} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.795147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.795908] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d999ce48-5328-472c-82f5-0b5657482597 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.830551] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 01a624d3-782d-44cf-8a4e-05a85ac91c64/01a624d3-782d-44cf-8a4e-05a85ac91c64.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.830931] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfbd78e0-299d-4855-89fe-8902361035d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.853726] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 758.853726] env[68279]: value = "task-2962834" [ 758.853726] env[68279]: _type = "Task" [ 758.853726] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.867700] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962834, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.874361] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5220a852-8ae5-ad3e-7e2f-39cbfa79683b, 'name': SearchDatastore_Task, 'duration_secs': 0.025837} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.874682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.874882] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.875177] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-651eb318-f427-496d-9d9f-d3080a07a96c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.883656] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 758.883656] env[68279]: value = "task-2962835" [ 758.883656] env[68279]: _type = "Task" [ 758.883656] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.895922] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962835, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.931436] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.931619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.931817] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 759.049125] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962833, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.949238} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.049125] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d452e3d2-1590-4352-8406-31d85b2921f4/d452e3d2-1590-4352-8406-31d85b2921f4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.049301] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.049495] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce2b64ba-e987-40c1-a9a1-a877511a3d83 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.063730] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 759.063730] env[68279]: value = "task-2962836" [ 759.063730] env[68279]: _type = "Task" [ 759.063730] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.077295] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962836, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.371804] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962834, 'name': ReconfigVM_Task, 'duration_secs': 0.397693} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.372189] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 01a624d3-782d-44cf-8a4e-05a85ac91c64/01a624d3-782d-44cf-8a4e-05a85ac91c64.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.373148] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef275e40-37dc-4222-9753-5463b6c04df6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.386604] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 759.386604] env[68279]: value = "task-2962837" [ 759.386604] env[68279]: _type = "Task" [ 759.386604] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.405254] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962835, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.409959] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962837, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.503982] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6930865-1f17-4c37-8c53-a7327209d565 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.514386] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb5b8c5-b4be-4879-b07f-31019a1d0a7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.546706] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.549224] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c76ced-f589-4315-9fde-6f83f3659eb1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.559458] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943c29bc-3d9c-4d76-9df5-ce5604c303a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.574909] env[68279]: DEBUG nova.compute.provider_tree [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.581537] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118576} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.581537] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.582535] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc98a8e-1b57-4c4e-9be1-d6a760cffa7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.609371] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] d452e3d2-1590-4352-8406-31d85b2921f4/d452e3d2-1590-4352-8406-31d85b2921f4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.610059] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f85b8c38-33d7-41cf-825e-74080fd4ae68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.636205] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 759.636205] env[68279]: value = "task-2962838" [ 759.636205] env[68279]: _type = "Task" [ 759.636205] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.647514] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962838, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.742869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.742869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.742869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.743096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.743272] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.745546] env[68279]: INFO nova.compute.manager [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Terminating instance [ 759.898277] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720555} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.899017] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.899293] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.900326] env[68279]: DEBUG nova.network.neutron [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updating instance_info_cache with network_info: [{"id": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "address": "fa:16:3e:ee:e7:74", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6e9c1c-19", "ovs_interfaceid": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.901777] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f6e3ab8-0e6a-43f0-a12f-9ab29ef6ac2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.909275] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962837, 'name': Rename_Task, 'duration_secs': 0.159263} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.910193] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 759.910459] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3696806b-8b74-423c-a092-037140067b90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.913948] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 759.913948] env[68279]: value = "task-2962839" [ 759.913948] env[68279]: _type = "Task" [ 759.913948] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.919056] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 759.919056] env[68279]: value = "task-2962840" [ 759.919056] env[68279]: _type = "Task" [ 759.919056] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.925470] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962839, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.932438] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.081036] env[68279]: DEBUG nova.scheduler.client.report [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.149787] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962838, 'name': ReconfigVM_Task, 'duration_secs': 0.286021} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.149787] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Reconfigured VM instance instance-00000029 to attach disk [datastore2] d452e3d2-1590-4352-8406-31d85b2921f4/d452e3d2-1590-4352-8406-31d85b2921f4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.149787] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c49bb9d3-bffb-4e40-bd1e-b5f7c89c4f02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.155334] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 760.155334] env[68279]: value = "task-2962841" [ 760.155334] env[68279]: _type = "Task" [ 760.155334] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.164538] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962841, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.250665] env[68279]: DEBUG nova.compute.manager [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 760.251106] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.252077] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28800f1-3b1c-4156-91cc-36e11e38f9bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.260118] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 760.260527] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f56c6f7-4e1c-450b-86df-246d31d4fc46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.269427] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 760.269427] env[68279]: value = "task-2962842" [ 760.269427] env[68279]: _type = "Task" [ 760.269427] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.277295] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.411070] env[68279]: DEBUG nova.compute.manager [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Received event network-changed-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.411320] env[68279]: DEBUG nova.compute.manager [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Refreshing instance network info cache due to event network-changed-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 760.411463] env[68279]: DEBUG oslo_concurrency.lockutils [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] Acquiring lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.411820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.412113] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Instance network_info: |[{"id": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "address": "fa:16:3e:ee:e7:74", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6e9c1c-19", "ovs_interfaceid": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 760.412655] env[68279]: DEBUG oslo_concurrency.lockutils [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] Acquired lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.412831] env[68279]: DEBUG nova.network.neutron [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Refreshing network info cache for port ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.417021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:e7:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff6e9c1c-196d-4f5d-aee7-1248e8476fa6', 
'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.421844] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 760.423576] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.430034] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04c4ad61-d335-4862-9c57-0f6259bd8d90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.452759] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071188} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.458779] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.459507] env[68279]: DEBUG oslo_vmware.api [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962840, 'name': PowerOnVM_Task, 'duration_secs': 0.504636} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.459699] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.459699] env[68279]: value = "task-2962843" [ 760.459699] env[68279]: _type = "Task" [ 760.459699] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.460474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873a8be1-7504-4dd7-8e3f-454f7eb2a2a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.462911] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.463142] env[68279]: INFO nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Took 7.84 seconds to spawn the instance on the hypervisor. 
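The spawn activity in the records above follows a consistent order: the cached image VMDK is located under a per-file lock, copied into the instance directory, the root disk is extended, ReconfigVM_Task attaches it, then Rename_Task and PowerOnVM_Task finish the boot. A hedged sketch of that ordering; the callables are hypothetical placeholders for the driver steps named in the log, not nova.virt.vmwareapi signatures, and the lock name mirrors the "[datastoreX] devstack-image-cache_base/..." locks shown above.

```python
# Hedged sketch of the spawn ordering observed in the log; the helper
# callables are hypothetical placeholders, not real nova.virt.vmwareapi APIs.
from oslo_concurrency import lockutils


def spawn_from_cached_image(cache_vmdk, instance_vmdk, root_size_kb,
                            copy_virtual_disk, extend_virtual_disk,
                            attach_disk_to_vm, rename_vm, power_on_vm):
    # Serialize access to the shared image-cache entry, mirroring the
    # Acquiring/Releasing lock records for devstack-image-cache_base above.
    with lockutils.lock(cache_vmdk):
        copy_virtual_disk(cache_vmdk, instance_vmdk)   # CopyVirtualDisk_Task

    extend_virtual_disk(instance_vmdk, root_size_kb)   # ExtendVirtualDisk_Task
    attach_disk_to_vm(instance_vmdk)                   # ReconfigVM_Task
    rename_vm()                                        # Rename_Task
    power_on_vm()                                      # PowerOnVM_Task
```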
[ 760.463318] env[68279]: DEBUG nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.464419] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fdc805-caf1-4dc0-9bb5-1049b86eeb38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.488890] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962843, 'name': CreateVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.497536] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.498115] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5079943-4cf3-4227-9375-d387a3d32f96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.518758] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 760.518758] env[68279]: value = "task-2962844" [ 760.518758] env[68279]: _type = "Task" [ 760.518758] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.527845] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962844, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.585617] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.586245] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.590257] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.056s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.590257] env[68279]: DEBUG nova.objects.instance [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lazy-loading 'resources' on Instance uuid 1d16a5c5-981b-474e-8159-820ac6fcc42d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.668423] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962841, 'name': Rename_Task, 'duration_secs': 0.166344} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.668782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.669067] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68d10ca0-23b5-46ad-b71d-310412e04b7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.676914] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 760.676914] env[68279]: value = "task-2962845" [ 760.676914] env[68279]: _type = "Task" [ 760.676914] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.686727] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.781896] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962842, 'name': PowerOffVM_Task, 'duration_secs': 0.207068} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.781896] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.781896] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.782185] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28520652-45f3-439e-b5af-bb80a3dc596f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.844300] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.845030] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.845030] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Deleting the datastore file [datastore2] 05b94aa5-3efc-4790-9d98-c2658b8e8b4b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.845421] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f89b2223-6811-4f21-85fe-d8525ffb62ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.853692] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for the task: (returnval){ [ 760.853692] env[68279]: value = "task-2962847" [ 760.853692] env[68279]: _type = "Task" [ 760.853692] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.862727] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.975800] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962843, 'name': CreateVM_Task, 'duration_secs': 0.369719} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.975995] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 760.977304] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.977463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.977985] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 760.979119] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050a342c-95ed-42a1-91d6-880fd778305b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.985086] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 760.985086] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f9bdf3-64cd-cec4-dbfb-c5577b78af81" [ 760.985086] env[68279]: _type = "Task" [ 760.985086] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.993807] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f9bdf3-64cd-cec4-dbfb-c5577b78af81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.008678] env[68279]: INFO nova.compute.manager [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Took 39.15 seconds to build instance. [ 761.029303] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962844, 'name': ReconfigVM_Task, 'duration_secs': 0.376849} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.030215] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfigured VM instance instance-00000027 to attach disk [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.031097] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2cf8fea-fbf1-4e63-b42e-2eb78c6c5b6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.037104] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 761.037104] env[68279]: value = "task-2962848" [ 761.037104] env[68279]: _type = "Task" [ 761.037104] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.050645] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962848, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.096136] env[68279]: DEBUG nova.compute.utils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.101021] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Not allocating networking since 'none' was specified. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 761.189255] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962845, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.335282] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac697ae-0eb1-4b18-aa4b-0eb699df1a60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.342446] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Suspending the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 761.346270] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c340551f-aa44-47c3-a60d-61238a1cad76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.352955] env[68279]: DEBUG oslo_vmware.api [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 761.352955] env[68279]: value = "task-2962849" [ 761.352955] env[68279]: _type = "Task" [ 761.352955] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.381027] env[68279]: DEBUG oslo_vmware.api [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962849, 'name': SuspendVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.386995] env[68279]: DEBUG oslo_vmware.api [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Task: {'id': task-2962847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152435} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.387381] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.387660] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 761.387924] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.388158] env[68279]: INFO nova.compute.manager [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Took 1.14 seconds to destroy the instance on the hypervisor. 
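The terminate path for instance 05b94aa5 above runs the inverse sequence: power the VM off, unregister it, delete its datastore directory, then hand network deallocation to a retried looping call. A minimal sketch of that ordering under the same caveat as the earlier sketches: the callables are hypothetical placeholders for the vmops/ds_util steps named in the records, not the driver's actual methods.

```python
# Hedged sketch of the destroy ordering shown above; the callables are
# hypothetical placeholders, not nova.virt.vmwareapi.vmops methods.
def destroy_instance(power_off_vm, unregister_vm, delete_datastore_dir,
                     deallocate_network):
    power_off_vm()            # PowerOffVM_Task
    unregister_vm()           # VirtualMachine.UnregisterVM
    delete_datastore_dir()    # FileManager.DeleteDatastoreFile_Task
    # Network cleanup is handed off separately and retried; see the
    # _deallocate_network_with_retries looping call in the records below.
    deallocate_network()
```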
[ 761.389072] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.389072] env[68279]: DEBUG nova.compute.manager [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 761.389072] env[68279]: DEBUG nova.network.neutron [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.461405] env[68279]: DEBUG nova.network.neutron [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updated VIF entry in instance network info cache for port ff6e9c1c-196d-4f5d-aee7-1248e8476fa6. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.461714] env[68279]: DEBUG nova.network.neutron [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updating instance_info_cache with network_info: [{"id": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "address": "fa:16:3e:ee:e7:74", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6e9c1c-19", "ovs_interfaceid": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.504408] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f9bdf3-64cd-cec4-dbfb-c5577b78af81, 'name': SearchDatastore_Task, 'duration_secs': 0.010454} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.507926] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.508212] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.508502] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.508746] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.508925] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.510849] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f64d3ba-a614-427e-825f-aa4f89501286 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.513315] env[68279]: DEBUG oslo_concurrency.lockutils [None req-25e694b9-0079-4c01-82e2-0083901e60e4 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.766s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.522712] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.523281] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.524052] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa32fd5-a4ad-49b9-8da6-632e2e32d6e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.531302] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 761.531302] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52804eb6-291a-61fe-9f44-8860553cc781" [ 761.531302] env[68279]: _type = "Task" [ 761.531302] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.562092] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962848, 'name': Rename_Task, 'duration_secs': 0.136566} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.562743] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52804eb6-291a-61fe-9f44-8860553cc781, 'name': SearchDatastore_Task, 'duration_secs': 0.011783} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.565655] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.566766] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee1497e1-366b-473a-b302-e8363653fdbc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.570387] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a607f0-5840-4a1e-89c2-3cf4ffb1b13c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.574983] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 761.574983] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f6490-d577-9444-0cc9-c6b9083b6293" [ 761.574983] env[68279]: _type = "Task" [ 761.574983] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.579586] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 761.579586] env[68279]: value = "task-2962850" [ 761.579586] env[68279]: _type = "Task" [ 761.579586] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.589470] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f6490-d577-9444-0cc9-c6b9083b6293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.595756] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962850, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.601731] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 761.688521] env[68279]: DEBUG oslo_vmware.api [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962845, 'name': PowerOnVM_Task, 'duration_secs': 0.697547} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.688521] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.688781] env[68279]: INFO nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Took 11.90 seconds to spawn the instance on the hypervisor. 
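The "Acquiring lock", "Lock ... acquired", and "Lock ... released ... held N.NNNs" entries above come from oslo.concurrency's lockutils, which nova uses to serialize work such as reuse of the devstack image cache and the per-instance build path. A minimal sketch of the two usual usages follows; the lock names and guarded bodies are placeholders, not nova's actual code.

    # Sketch of the oslo.concurrency locking pattern behind the lock entries above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # runs with the named lock held; lockutils logs acquire / held / release
        pass

    # the same idea as a context manager, e.g. around a cached image path
    with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
        pass  # fetch or reuse the cached VMDK while holding the lock
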
[ 761.689059] env[68279]: DEBUG nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.689883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66ffee-d315-4002-9db5-7f72050c5480 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.794826] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa81817-e871-4dce-8d1c-b7e9fbdf2b1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.811868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58df4e64-b3e7-45ca-8ddd-f6516fbfad23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.841944] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53df1f71-e74c-429f-8d4e-5ab3ae78e4e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.850330] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eeefc88-9d09-4aed-8831-06f5a968241d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.869321] env[68279]: DEBUG nova.compute.provider_tree [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.877457] env[68279]: DEBUG oslo_vmware.api [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962849, 'name': SuspendVM_Task} progress is 62%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.969961] env[68279]: DEBUG oslo_concurrency.lockutils [req-ee7f6c25-2110-4749-8a94-b5aeee533858 req-0a550f45-6607-4175-9db8-d88a1474babb service nova] Releasing lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.017691] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 762.058756] env[68279]: DEBUG nova.compute.manager [req-ee07033b-468d-47b3-82a3-966df928fea4 req-f8400421-36b5-4af9-8bea-8b7d076212b8 service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Received event network-vif-deleted-5859bf70-4474-4ff0-8e13-13176937abc5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 762.058756] env[68279]: INFO nova.compute.manager [req-ee07033b-468d-47b3-82a3-966df928fea4 req-f8400421-36b5-4af9-8bea-8b7d076212b8 service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Neutron deleted interface 5859bf70-4474-4ff0-8e13-13176937abc5; detaching it from the instance and deleting it from the info cache [ 762.059014] env[68279]: DEBUG nova.network.neutron [req-ee07033b-468d-47b3-82a3-966df928fea4 req-f8400421-36b5-4af9-8bea-8b7d076212b8 service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.086882] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f6490-d577-9444-0cc9-c6b9083b6293, 'name': SearchDatastore_Task, 'duration_secs': 0.011414} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.087528] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.087817] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 7858163d-8e68-4565-b1e0-ecd2e9be350d/7858163d-8e68-4565-b1e0-ecd2e9be350d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.088102] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a8d6695-b92a-4651-ad97-e2128e91c5e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.093362] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962850, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.097490] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 762.097490] env[68279]: value = "task-2962851" [ 762.097490] env[68279]: _type = "Task" [ 762.097490] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.111474] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.213466] env[68279]: INFO nova.compute.manager [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Took 45.86 seconds to build instance. [ 762.368100] env[68279]: DEBUG oslo_vmware.api [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962849, 'name': SuspendVM_Task, 'duration_secs': 0.930644} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.368100] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Suspended the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 762.368100] env[68279]: DEBUG nova.compute.manager [None req-c775814e-dabc-4aa6-949b-5fba9a110d41 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.368543] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674ab80f-f5fd-4d3d-9213-02bdeb660b32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.382293] env[68279]: DEBUG nova.scheduler.client.report [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.387275] env[68279]: DEBUG nova.network.neutron [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.551434] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.565349] env[68279]: DEBUG oslo_vmware.service [-] 
Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf77c1f4-0c1c-43f0-8c5a-50b54866e6b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.576418] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fce447-7c3c-420b-ac31-ca208166b25a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.605599] env[68279]: DEBUG oslo_vmware.api [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962850, 'name': PowerOnVM_Task, 'duration_secs': 0.631546} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.610210] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.610210] env[68279]: DEBUG nova.compute.manager [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.627027] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f36334-bf01-4a3d-9b68-ae59f2d4b9a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.630482] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 762.632809] env[68279]: DEBUG nova.compute.manager [req-ee07033b-468d-47b3-82a3-966df928fea4 req-f8400421-36b5-4af9-8bea-8b7d076212b8 service nova] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Detach interface failed, port_id=5859bf70-4474-4ff0-8e13-13176937abc5, reason: Instance 05b94aa5-3efc-4790-9d98-c2658b8e8b4b could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 762.638510] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533597} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.643022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 7858163d-8e68-4565-b1e0-ecd2e9be350d/7858163d-8e68-4565-b1e0-ecd2e9be350d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 762.643022] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 762.646043] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bb1310c-77c8-4f4f-b2ec-450343393425 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.655035] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 762.655035] env[68279]: value = "task-2962852" [ 762.655035] env[68279]: _type = "Task" [ 762.655035] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.669973] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962852, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.672318] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 762.672557] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.672714] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 762.672894] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.673055] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 762.673208] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 762.673534] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 762.673749] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 762.674105] env[68279]: DEBUG 
nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 762.674105] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 762.674280] env[68279]: DEBUG nova.virt.hardware [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 762.675103] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55340d08-c0e6-4278-bfa2-883ddd7659c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.683066] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f5d2cd-dc69-416a-af1c-1bdc814653da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.699378] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.704900] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Creating folder: Project (bcf682a4812f4d99b8885e257f4e35b9). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.705599] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4771ce8-7ab7-4771-a80a-7796669f7315 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.715703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-869ccf69-07c2-41f0-b8a6-00daf267d3fc tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.909s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.717763] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Created folder: Project (bcf682a4812f4d99b8885e257f4e35b9) in parent group-v594445. [ 762.717948] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Creating folder: Instances. Parent ref: group-v594571. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.721034] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcae6994-1259-4695-bddf-6b07c0531121 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.727874] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Created folder: Instances in parent group-v594571. [ 762.728140] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 762.730434] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.731300] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9ad6dd7-e120-407e-8b9e-8e8e075f2411 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.749623] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.749623] env[68279]: value = "task-2962855" [ 762.749623] env[68279]: _type = "Task" [ 762.749623] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.758356] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962855, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.890307] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.301s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.894436] env[68279]: INFO nova.compute.manager [-] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Took 1.51 seconds to deallocate network for instance. 
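The surrounding entries trace the VMware spawn sequence for instance 43f629d6: create the project/instance folders and the VM shell, copy the cached image VMDK into the instance directory, extend the root disk, reconfigure the VM to attach it, then power it on. The sketch below compresses that sequence into one function under stated assumptions; it is not nova's actual helper code, and every reference, spec and size argument is a placeholder supplied by the caller.

    # Rough sketch (assumptions, not nova's helpers) of the spawn flow traced above.
    # 'session' is an oslo.vmware VMwareAPISession; all other arguments are placeholders.
    def spawn_from_cached_image(session, vm_folder, res_pool, create_spec,
                                disk_mgr, datacenter, cached_vmdk, instance_vmdk,
                                root_kb, attach_spec):
        vim = session.vim
        task = session.invoke_api(vim, 'CreateVM_Task', vm_folder,
                                  config=create_spec, pool=res_pool)      # CreateVM_Task
        vm_ref = session.wait_for_task(task).result
        task = session.invoke_api(vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cached_vmdk, sourceDatacenter=datacenter,
                                  destName=instance_vmdk, destDatacenter=datacenter)
        session.wait_for_task(task)                                       # copy cached image
        task = session.invoke_api(vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                  name=instance_vmdk, datacenter=datacenter,
                                  newCapacityKb=root_kb, eagerZero=False)
        session.wait_for_task(task)                                       # extend root disk
        task = session.invoke_api(vim, 'ReconfigVM_Task', vm_ref, spec=attach_spec)
        session.wait_for_task(task)                                       # attach the disk
        task = session.invoke_api(vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)                                       # power on
        return vm_ref
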
[ 762.895787] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.384s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.895962] env[68279]: DEBUG nova.objects.instance [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 762.925997] env[68279]: INFO nova.scheduler.client.report [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Deleted allocations for instance 1d16a5c5-981b-474e-8159-820ac6fcc42d [ 763.167934] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.171353] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07054} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.171603] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 763.172583] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60d31ab-143d-4950-84aa-e4ff7d3120c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.195807] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 7858163d-8e68-4565-b1e0-ecd2e9be350d/7858163d-8e68-4565-b1e0-ecd2e9be350d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.197299] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d452f551-a34d-43d0-bd5b-e569c411a31d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.221527] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.229168] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 763.229168] env[68279]: value = "task-2962856" [ 763.229168] env[68279]: _type = "Task" [ 763.229168] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.240594] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962856, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.262684] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962855, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.409753] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.435007] env[68279]: DEBUG oslo_concurrency.lockutils [None req-da1c5d17-0727-44f3-90f5-02f98b30e157 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333 tempest-FloatingIPsAssociationNegativeTestJSON-1615489333-project-member] Lock "1d16a5c5-981b-474e-8159-820ac6fcc42d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.540s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.721516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "d452e3d2-1590-4352-8406-31d85b2921f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.721516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.721516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.721516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.721516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.723991] env[68279]: INFO nova.compute.manager [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Terminating instance [ 
763.749021] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962856, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.761013] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962855, 'name': CreateVM_Task, 'duration_secs': 0.746594} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.763931] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.763931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.763931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.765097] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 763.765097] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9bd1be2-c4cc-4a3d-ade8-e044bdcf1474 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.769124] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.769505] env[68279]: INFO nova.compute.manager [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Rebuilding instance [ 763.774632] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 763.774632] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5247df24-1e0a-21bd-cad7-cc2efaa428ad" [ 763.774632] env[68279]: _type = "Task" [ 763.774632] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.783473] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5247df24-1e0a-21bd-cad7-cc2efaa428ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.830787] env[68279]: DEBUG nova.compute.manager [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.831847] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d63f26f-ddfa-4fa4-8a53-12064c8e7e6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.907543] env[68279]: DEBUG oslo_concurrency.lockutils [None req-41937ebd-2f3b-4ff3-94db-d4397359ff7b tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.908781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.592s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.910525] env[68279]: INFO nova.compute.claims [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.232495] env[68279]: DEBUG nova.compute.manager [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 764.232750] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.233761] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8496c5-5e1c-4e8a-9fb6-c74ab60ad4ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.248114] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.252647] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f05d723-53e0-42d7-b3ed-4a73bd99d917 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.255024] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962856, 'name': ReconfigVM_Task, 'duration_secs': 0.81145} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.255650] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 7858163d-8e68-4565-b1e0-ecd2e9be350d/7858163d-8e68-4565-b1e0-ecd2e9be350d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.256809] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5efa05f2-f104-45aa-8a55-58c5be3e693d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.264183] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 764.264183] env[68279]: value = "task-2962857" [ 764.264183] env[68279]: _type = "Task" [ 764.264183] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.267808] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 764.267808] env[68279]: value = "task-2962858" [ 764.267808] env[68279]: _type = "Task" [ 764.267808] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.276572] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962857, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.285784] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962858, 'name': Rename_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.292715] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5247df24-1e0a-21bd-cad7-cc2efaa428ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.293037] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.293367] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.293716] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.293795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.293975] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.294270] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9837a1ce-019f-4de1-b58e-31b0de965aa5 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.303295] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.304223] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.304313] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae5cb6d8-dab4-4444-b4e5-a1e2636c0434 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.313155] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 764.313155] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523808c5-1e0f-71fa-01af-a220ffa914d2" [ 764.313155] env[68279]: _type = "Task" [ 764.313155] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.322459] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523808c5-1e0f-71fa-01af-a220ffa914d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.774062] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962857, 'name': PowerOffVM_Task, 'duration_secs': 0.201591} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.778037] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.778037] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.778241] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c548c0c-1da8-4d3f-abab-f22824c12354 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.786599] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962858, 'name': Rename_Task, 'duration_secs': 0.188925} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.787656] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 764.787656] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44b3b42f-4c4c-4fdd-b214-ac05c046da03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.795967] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 764.795967] env[68279]: value = "task-2962860" [ 764.795967] env[68279]: _type = "Task" [ 764.795967] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.805758] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962860, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.823370] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523808c5-1e0f-71fa-01af-a220ffa914d2, 'name': SearchDatastore_Task, 'duration_secs': 0.009547} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.829021] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da10b16f-15d7-4793-b141-4563ff9511b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.840210] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 764.840210] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b049f-7a63-6fd3-7d19-b03826753efc" [ 764.840210] env[68279]: _type = "Task" [ 764.840210] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.851445] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.851445] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b049f-7a63-6fd3-7d19-b03826753efc, 'name': SearchDatastore_Task, 'duration_secs': 0.009857} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.851631] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c78221e-eb26-4734-a7d7-2aba6f2bdc9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.853623] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.854133] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.854478] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa3d0a4c-65d8-4579-93d9-d079dd856c5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.863253] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 764.863253] env[68279]: value = "task-2962861" [ 764.863253] env[68279]: _type = 
"Task" [ 764.863253] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.864809] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 764.864809] env[68279]: value = "task-2962862" [ 764.864809] env[68279]: _type = "Task" [ 764.864809] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.876951] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.880565] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.888821] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.889089] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.889288] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleting the datastore file [datastore2] d452e3d2-1590-4352-8406-31d85b2921f4 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.889571] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82c15787-de3d-4adb-abff-bf026fa41f81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.896283] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for the task: (returnval){ [ 764.896283] env[68279]: value = "task-2962863" [ 764.896283] env[68279]: _type = "Task" [ 764.896283] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.904886] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962863, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.309656] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962860, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.381983] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962861, 'name': PowerOffVM_Task, 'duration_secs': 0.114424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.381983] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.382186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.385019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1bdcb6-0fd2-410e-9cf4-eb3469e29bc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.389657] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503133} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.390254] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.390457] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.390695] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed672224-9502-4eae-aba9-3950f9c6584c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.396906] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.397661] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c4e566b-87d5-4549-8da5-d9fff7370b99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.403104] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 765.403104] env[68279]: value = "task-2962864" [ 765.403104] env[68279]: _type = "Task" [ 765.403104] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.409499] env[68279]: DEBUG oslo_vmware.api [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Task: {'id': task-2962863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262145} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.414228] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.414386] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.414532] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.414746] env[68279]: INFO nova.compute.manager [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 765.415054] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.415980] env[68279]: DEBUG nova.compute.manager [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.416102] env[68279]: DEBUG nova.network.neutron [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.421578] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962864, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.426061] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.426345] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.426682] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Deleting the datastore file [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.426827] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-043c1a2a-1948-4eb8-8b50-cdc3bab51508 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.433286] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 765.433286] env[68279]: value = "task-2962866" [ 765.433286] env[68279]: _type = "Task" [ 765.433286] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.444447] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.543212] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cee4ab-a442-4ef7-83cd-c13488221004 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.551468] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f225c4-c8ad-4d4e-947d-5c99cf4953b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.604474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4643611-c730-480c-871c-e693557526fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.623377] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29504878-a2f0-4cdf-8006-3d94ceaa6f87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.652521] env[68279]: DEBUG nova.compute.provider_tree [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.737318] env[68279]: DEBUG nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 765.738250] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c65c90e-45ec-40d5-a181-141c0d6bc6cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.805389] env[68279]: DEBUG nova.compute.manager [req-4cb4cb8d-232d-48a2-8bbc-c24db7ab0fb2 req-31698db1-49ab-4910-8a3f-48636dd2060a service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-vif-deleted-5d6278f5-880f-41ed-ad7e-f1211fdf3abf {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.806053] env[68279]: INFO nova.compute.manager [req-4cb4cb8d-232d-48a2-8bbc-c24db7ab0fb2 req-31698db1-49ab-4910-8a3f-48636dd2060a service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Neutron deleted interface 5d6278f5-880f-41ed-ad7e-f1211fdf3abf; detaching it from the instance and deleting it from the info cache [ 765.806053] env[68279]: DEBUG nova.network.neutron [req-4cb4cb8d-232d-48a2-8bbc-c24db7ab0fb2 req-31698db1-49ab-4910-8a3f-48636dd2060a service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updating instance_info_cache with network_info: [{"id": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "address": "fa:16:3e:d0:bb:e3", "network": {"id": "bffe4d13-71d4-48c1-81df-90a34224bbe2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2115023872", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.25", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": 
[], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4632448f387a49eda08bcdc55b94a84c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471c04b2-ce", "ovs_interfaceid": "471c04b2-cefa-42ee-b72b-a233b1b330c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.811257] env[68279]: DEBUG oslo_vmware.api [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962860, 'name': PowerOnVM_Task, 'duration_secs': 0.538504} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.811704] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 765.811897] env[68279]: INFO nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Took 8.23 seconds to spawn the instance on the hypervisor. [ 765.812262] env[68279]: DEBUG nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 765.813687] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba576b65-3525-4ba1-a31f-590e5f672a1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.913371] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962864, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150099} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.913649] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.914503] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1382031f-4b80-450f-839d-8b41f339cb3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.936172] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.938103] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3194f040-9b19-42bb-91a6-5d992e926553 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.962472] env[68279]: DEBUG nova.network.neutron [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.975801] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231795} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.975801] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.976041] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.976121] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.978868] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 765.978868] env[68279]: value = "task-2962867" [ 765.978868] env[68279]: _type = "Task" [ 765.978868] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.988296] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962867, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.159393] env[68279]: DEBUG nova.scheduler.client.report [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.251494] env[68279]: INFO nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] instance snapshotting [ 766.251884] env[68279]: WARNING nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 766.254753] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac76bd44-6608-479f-b338-e1fe575757d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.276632] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a531057-6bce-4d44-ae88-5f89783849bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.313773] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d24ec187-d96c-4037-99a6-46d2935c6f5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.323309] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd07e25-5007-4ee4-9dd0-89696c32d704 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.348019] env[68279]: INFO nova.compute.manager [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Took 40.68 seconds to build instance. [ 766.372535] env[68279]: DEBUG nova.compute.manager [req-4cb4cb8d-232d-48a2-8bbc-c24db7ab0fb2 req-31698db1-49ab-4910-8a3f-48636dd2060a service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Detach interface failed, port_id=5d6278f5-880f-41ed-ad7e-f1211fdf3abf, reason: Instance d452e3d2-1590-4352-8406-31d85b2921f4 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 766.429827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "b45f310f-e614-47db-9f6e-f35dd481137c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.430108] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.469764] env[68279]: INFO nova.compute.manager [-] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Took 1.05 seconds to deallocate network for instance. [ 766.492293] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.667043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.758s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.667390] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 766.670381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.803s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.670690] env[68279]: DEBUG nova.objects.instance [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'resources' on Instance uuid d8eca7ac-744e-469c-9a87-901f0641f4f2 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.787940] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 766.788291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6223f783-7a6b-4317-9ea9-6a188f027e57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.796040] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 766.796040] env[68279]: value = "task-2962868" [ 766.796040] env[68279]: _type = "Task" [ 766.796040] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.804366] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962868, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.849541] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cf0ab7bf-1d74-4e8b-94fc-6484b80df5c7 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 67.713s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.976253] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.996227] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962867, 'name': ReconfigVM_Task, 'duration_secs': 1.019646} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.996486] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.997324] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-706d4082-5650-47a8-8d1a-764278b67c08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.007532] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 767.007532] env[68279]: value = "task-2962869" [ 767.007532] env[68279]: _type = "Task" [ 767.007532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.014035] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962869, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.022477] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.022588] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.022775] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.022942] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 
tempest-ServersAdmin275Test-1015736347-project-admin] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.023107] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.024371] env[68279]: DEBUG nova.virt.hardware [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.025823] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa2bb57-9f4a-4340-9e6f-d320337ce715 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.036571] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3838fa-9644-4e8a-b8e1-451fc724730e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.056054] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.062261] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.062603] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.063173] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc948409-04ed-4506-b54a-9f443f9622fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.081703] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.081703] env[68279]: value = "task-2962870" [ 767.081703] env[68279]: _type = "Task" [ 767.081703] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.090012] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962870, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.146793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Acquiring lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.146793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Acquired lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.146793] env[68279]: DEBUG nova.network.neutron [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.176582] env[68279]: DEBUG nova.compute.utils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 767.181309] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 767.181501] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.234392] env[68279]: DEBUG nova.policy [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.313291] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962868, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.351567] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 767.518294] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962869, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.594639] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962870, 'name': CreateVM_Task, 'duration_secs': 0.269735} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.601058] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.601058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.601058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.601058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.601058] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4508997a-4152-4acc-8152-6332c72bd184 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.605698] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 767.605698] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cd92f5-b003-fad6-72d3-73116e475d80" [ 767.605698] env[68279]: _type = "Task" [ 767.605698] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.617279] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cd92f5-b003-fad6-72d3-73116e475d80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.626825] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Successfully created port: 1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.684960] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 767.809904] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962868, 'name': CreateSnapshot_Task, 'duration_secs': 0.786365} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.810229] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 767.811062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f365c9-85de-4062-8999-21e4ad51c308 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.834408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b446e573-ea69-4e4a-aed4-871ff4c5abfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.843834] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed91d36-baeb-4932-ab66-f8b206a37599 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.891326] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f0a222-04ca-4148-9222-34a649891f96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.900250] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b9c96c-a908-4819-9b46-09627164c882 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.907667] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.920482] env[68279]: DEBUG nova.compute.provider_tree [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.016502] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': 
task-2962869, 'name': Rename_Task, 'duration_secs': 0.936612} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.016778] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.017039] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ace979d9-4382-45ee-bc71-7a40c624ec6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.024142] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 768.024142] env[68279]: value = "task-2962871" [ 768.024142] env[68279]: _type = "Task" [ 768.024142] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.035419] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.046331] env[68279]: DEBUG nova.compute.manager [req-b91d3069-e2df-4bc9-be9b-cf8fbee25e06 req-9d9c0c87-cc59-4c06-8537-08b3c94adcf4 service nova] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Received event network-vif-deleted-471c04b2-cefa-42ee-b72b-a233b1b330c2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.116089] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cd92f5-b003-fad6-72d3-73116e475d80, 'name': SearchDatastore_Task, 'duration_secs': 0.01112} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.116944] env[68279]: DEBUG nova.network.neutron [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updating instance_info_cache with network_info: [{"id": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "address": "fa:16:3e:ee:e7:74", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6e9c1c-19", "ovs_interfaceid": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.118210] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.118630] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.118710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.118865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.119065] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.119489] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c1ab730-c614-4728-900d-276f83c07dbc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.128457] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.128688] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.129416] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1414b91-7c04-4a3d-a607-b11ccb3b546f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.135752] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 768.135752] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52361410-7e25-0144-94c1-e087da0f8f8e" [ 768.135752] env[68279]: _type = "Task" [ 768.135752] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.145124] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52361410-7e25-0144-94c1-e087da0f8f8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.336177] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 768.336303] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e146422a-1a9c-4a9c-8c9f-565dec213bd2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.344720] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 768.344720] env[68279]: value = "task-2962872" [ 768.344720] env[68279]: _type = "Task" [ 768.344720] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.352718] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962872, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.442244] env[68279]: ERROR nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [req-f5017dba-eeca-4ec5-beff-95f8712c7975] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f5017dba-eeca-4ec5-beff-95f8712c7975"}]} [ 768.463290] env[68279]: DEBUG nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 768.480047] env[68279]: DEBUG nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 768.480323] env[68279]: DEBUG nova.compute.provider_tree [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.494242] env[68279]: DEBUG nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Refreshing 
aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 768.521258] env[68279]: DEBUG nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 768.533776] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962871, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.621007] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Releasing lock "refresh_cache-7858163d-8e68-4565-b1e0-ecd2e9be350d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.621353] env[68279]: DEBUG nova.compute.manager [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Inject network info {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 768.621780] env[68279]: DEBUG nova.compute.manager [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] network_info to inject: |[{"id": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "address": "fa:16:3e:ee:e7:74", "network": {"id": "d2306acf-763b-4812-a0a1-329945ce5b00", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2002824764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "295e4a23df6e4d029636d514484434e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6e9c1c-19", "ovs_interfaceid": "ff6e9c1c-196d-4f5d-aee7-1248e8476fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 768.627389] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 
7858163d-8e68-4565-b1e0-ecd2e9be350d] Reconfiguring VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 768.630873] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32c0dc22-0fcd-48ee-a945-1e34cf6b9949 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.654536] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52361410-7e25-0144-94c1-e087da0f8f8e, 'name': SearchDatastore_Task, 'duration_secs': 0.011094} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.659720] env[68279]: DEBUG oslo_vmware.api [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Waiting for the task: (returnval){ [ 768.659720] env[68279]: value = "task-2962873" [ 768.659720] env[68279]: _type = "Task" [ 768.659720] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.660181] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16db09ff-e74e-4fd0-8d41-585299a57209 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.668967] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 768.668967] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ed68a9-95b0-fd7c-c60c-664b45b1f20a" [ 768.668967] env[68279]: _type = "Task" [ 768.668967] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.673075] env[68279]: DEBUG oslo_vmware.api [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Task: {'id': task-2962873, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.687347] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ed68a9-95b0-fd7c-c60c-664b45b1f20a, 'name': SearchDatastore_Task, 'duration_secs': 0.010834} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.687347] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.687347] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.687347] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b17126d0-6dbe-489a-b416-823ef47fa0ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.697021] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 768.697021] env[68279]: value = "task-2962874" [ 768.697021] env[68279]: _type = "Task" [ 768.697021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.705595] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 768.707494] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962874, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.730483] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 768.730772] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.730961] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 768.731221] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.731587] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 768.731587] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 768.731862] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 768.733741] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 768.733741] env[68279]: DEBUG nova.virt.hardware [None 
req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 768.733741] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 768.733741] env[68279]: DEBUG nova.virt.hardware [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 768.734043] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c69edc7-e58b-4439-881e-91beb500f2aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.742479] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07be9f17-855c-416f-a7a2-b82f9f7064be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.857722] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962872, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.038975] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962871, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.109229] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75a958d-3b02-4af8-bfb6-3fc27498ed23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.117746] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ac648b-fb1e-454e-b157-76b8e16ce611 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.152233] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdf48bb-84a0-4897-bdc7-fe9c014fad09 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.160534] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb85c04-916f-4822-9460-0045d147f617 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.182138] env[68279]: DEBUG nova.compute.provider_tree [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.187267] env[68279]: DEBUG oslo_vmware.api [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] Task: {'id': task-2962873, 'name': ReconfigVM_Task, 'duration_secs': 0.159148} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.188191] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc6f883-3055-4c39-8b22-a59186869340 tempest-ServersAdminTestJSON-1707708817 tempest-ServersAdminTestJSON-1707708817-project-admin] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Reconfigured VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 769.205295] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962874, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.241301] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Successfully updated port: 1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.356323] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962872, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.537781] env[68279]: DEBUG oslo_vmware.api [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962871, 'name': PowerOnVM_Task, 'duration_secs': 1.18379} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.538051] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.538260] env[68279]: INFO nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Took 6.91 seconds to spawn the instance on the hypervisor. [ 769.538439] env[68279]: DEBUG nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.539253] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f30c4c-6dd6-4b5f-a88b-d1ddea138b2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.707498] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535807} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.707794] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.708069] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.708330] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-015f58d0-25fc-4385-95f1-ca1fc53cfcc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.714586] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 769.714586] env[68279]: value = "task-2962875" [ 769.714586] env[68279]: _type = "Task" [ 769.714586] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.723699] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962875, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.729941] env[68279]: DEBUG nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 769.730201] env[68279]: DEBUG nova.compute.provider_tree [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 67 to 68 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 769.730381] env[68279]: DEBUG nova.compute.provider_tree [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.743722] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.743870] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.744049] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.857519] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962872, 'name': CloneVM_Task, 'duration_secs': 1.43437} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.859055] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Created linked-clone VM from snapshot [ 769.859055] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a901c9ac-4d2a-468a-812d-cd7787bad0d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.866260] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Uploading image b217a09c-2e46-46c6-8837-a229cfd46700 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 769.886827] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 769.886827] env[68279]: value = "vm-594576" [ 769.886827] env[68279]: _type = "VirtualMachine" [ 769.886827] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 769.887432] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bf423613-eb5b-493e-b216-2e9baa5abe94 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.894915] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease: (returnval){ [ 769.894915] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cea985-917c-c8c4-471c-ff1945d4a1c1" [ 769.894915] env[68279]: _type = "HttpNfcLease" [ 769.894915] env[68279]: } obtained for exporting VM: (result){ [ 769.894915] env[68279]: value = "vm-594576" [ 769.894915] env[68279]: _type = "VirtualMachine" [ 769.894915] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 769.895344] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the lease: (returnval){ [ 769.895344] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cea985-917c-c8c4-471c-ff1945d4a1c1" [ 769.895344] env[68279]: _type = "HttpNfcLease" [ 769.895344] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 769.901707] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 769.901707] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cea985-917c-c8c4-471c-ff1945d4a1c1" [ 769.901707] env[68279]: _type = "HttpNfcLease" [ 769.901707] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 770.055322] env[68279]: INFO nova.compute.manager [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Took 39.89 seconds to build instance. [ 770.108331] env[68279]: DEBUG nova.compute.manager [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Received event network-vif-plugged-1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.108530] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Acquiring lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.108822] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.108996] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.109178] env[68279]: DEBUG nova.compute.manager [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] No waiting events found dispatching network-vif-plugged-1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.109356] env[68279]: WARNING nova.compute.manager [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Received unexpected event network-vif-plugged-1692e61e-7fc8-48fb-bed2-cb281c3b64e6 for instance with vm_state building and task_state spawning. [ 770.109511] env[68279]: DEBUG nova.compute.manager [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Received event network-changed-1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.109725] env[68279]: DEBUG nova.compute.manager [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Refreshing instance network info cache due to event network-changed-1692e61e-7fc8-48fb-bed2-cb281c3b64e6. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 770.109919] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Acquiring lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.129585] env[68279]: INFO nova.compute.manager [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Rebuilding instance [ 770.170767] env[68279]: DEBUG nova.compute.manager [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.171701] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4283618c-816f-46ce-bdb3-f496a10ef931 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.225435] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109499} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.225435] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.225793] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a056cad1-735b-4c46-aa1b-a57271690456 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.239370] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.569s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.250401] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.251198] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.684s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.253663] env[68279]: INFO nova.compute.claims [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.260123] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62944e60-78a4-4166-b3cc-db7e48f02ad8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.277669] env[68279]: INFO nova.scheduler.client.report [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocations for instance d8eca7ac-744e-469c-9a87-901f0641f4f2 [ 770.282260] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 770.282260] env[68279]: value = "task-2962877" [ 770.282260] env[68279]: _type = "Task" [ 770.282260] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.293627] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962877, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.320176] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.403120] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 770.403120] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cea985-917c-c8c4-471c-ff1945d4a1c1" [ 770.403120] env[68279]: _type = "HttpNfcLease" [ 770.403120] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 770.403419] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 770.403419] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cea985-917c-c8c4-471c-ff1945d4a1c1" [ 770.403419] env[68279]: _type = "HttpNfcLease" [ 770.403419] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 770.404415] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b04c1af-b82f-4d54-9f71-31ba5c74a962 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.413954] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 770.414149] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 770.513850] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1199e875-d01e-4de6-9627-4914b40aa2d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.557384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-947ed722-6114-4260-9e26-43ece0255f70 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.165s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.610409] env[68279]: DEBUG nova.network.neutron [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Updating instance_info_cache with network_info: [{"id": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "address": "fa:16:3e:3a:c5:e3", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1692e61e-7f", "ovs_interfaceid": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 770.790206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-52e31f9d-4755-4090-82cf-29b885eb91ec tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "d8eca7ac-744e-469c-9a87-901f0641f4f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.517s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.801059] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962877, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.921700] env[68279]: INFO nova.compute.manager [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Rebuilding instance [ 770.981768] env[68279]: DEBUG nova.compute.manager [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.982796] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f28d382-06ae-4f56-8e43-ee84a54d4177 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.061679] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.113022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.113346] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Instance network_info: |[{"id": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "address": "fa:16:3e:3a:c5:e3", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1692e61e-7f", "ovs_interfaceid": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 771.113680] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Acquired lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.113976] env[68279]: DEBUG nova.network.neutron [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Refreshing network info cache for port 1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.121026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:c5:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1692e61e-7fc8-48fb-bed2-cb281c3b64e6', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.129436] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating 
folder: Project (fd517424aba641e4b867e440ba0ee7ac). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.130596] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04f67d74-b376-4b31-9145-f083e78f3445 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.142458] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created folder: Project (fd517424aba641e4b867e440ba0ee7ac) in parent group-v594445. [ 771.142662] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating folder: Instances. Parent ref: group-v594577. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.142994] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a00d9ab-6169-4631-b277-d77809f6b1e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.153163] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created folder: Instances in parent group-v594577. [ 771.153331] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.153458] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.153677] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05e1304a-0d36-4a11-8b70-3aaa3f4cb4fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.185111] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.185717] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc190ca3-a538-4523-9bd3-1aac93fa8322 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.191073] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.191073] env[68279]: value = "task-2962880" [ 771.191073] env[68279]: _type = "Task" [ 771.191073] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.199620] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 771.199620] env[68279]: value = "task-2962881" [ 771.199620] env[68279]: _type = "Task" [ 771.199620] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.211298] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962880, 'name': CreateVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.219796] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.296994] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962877, 'name': ReconfigVM_Task, 'duration_secs': 0.558349} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.297472] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Reconfigured VM instance instance-00000027 to attach disk [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53/a332b35f-4f96-4f8f-aa9a-d7fadf9ede53.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.299261] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b24b60b3-19f7-40ca-8d1e-237a6ab55b40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.307258] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 771.307258] env[68279]: value = "task-2962882" [ 771.307258] env[68279]: _type = "Task" [ 771.307258] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.318376] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962882, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.601745] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.720896] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962880, 'name': CreateVM_Task, 'duration_secs': 0.34295} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.722452] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962881, 'name': PowerOffVM_Task, 'duration_secs': 0.254284} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.722452] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.722452] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.722452] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.722953] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.722953] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.723351] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 771.724233] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb82447-70e1-4d31-8bee-d74888f06f50 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.727863] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d47118d4-33d7-4371-a336-25042e1b164c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.742113] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 771.745791] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad6cc9f8-520d-4283-bab4-e42f87a54a45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.746011] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 771.746011] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e4dbab-c10c-8d0e-d580-b75b0ab95aff" [ 771.746011] env[68279]: _type = "Task" [ 771.746011] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.763071] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e4dbab-c10c-8d0e-d580-b75b0ab95aff, 'name': SearchDatastore_Task, 'duration_secs': 0.009935} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.763577] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.763986] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.764409] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.764750] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.765059] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.765439] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8b9bd55-15c7-427d-8dc3-77420e4476c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.777709] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.777709] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.777709] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55146afe-43d7-4c9e-b816-177144fbce7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.782631] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 771.782631] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522dcebe-254d-dedd-3bc9-19ee489be465" [ 771.782631] env[68279]: _type = "Task" [ 771.782631] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.795542] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522dcebe-254d-dedd-3bc9-19ee489be465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.809302] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.809545] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.809796] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.813755] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e878ca57-94c1-434e-bad3-90ac963e7997 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.823670] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962882, 'name': Rename_Task, 'duration_secs': 0.190986} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.825381] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.825555] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 771.825555] env[68279]: value = "task-2962884" [ 771.825555] env[68279]: _type = "Task" [ 771.825555] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.831171] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcfac77c-c547-4a47-9263-db328dcee0f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.838437] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.840103] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Waiting for the task: (returnval){ [ 771.840103] env[68279]: value = "task-2962885" [ 771.840103] env[68279]: _type = "Task" [ 771.840103] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.853211] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962885, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.968623] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fda15d-9800-475f-9304-8da806205a53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.976855] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7912c543-44d2-4a95-a1df-a09023adb878 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.007508] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 772.008736] env[68279]: DEBUG nova.network.neutron [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Updated VIF entry in instance network info cache for port 1692e61e-7fc8-48fb-bed2-cb281c3b64e6. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.009093] env[68279]: DEBUG nova.network.neutron [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Updating instance_info_cache with network_info: [{"id": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "address": "fa:16:3e:3a:c5:e3", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1692e61e-7f", "ovs_interfaceid": "1692e61e-7fc8-48fb-bed2-cb281c3b64e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.010340] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-869950a1-5ad1-459f-b95c-623edeb213c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.012559] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb1bf70-5549-4fe6-86c2-db2ef8058e4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.022468] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-e2617053-14d3-4ab4-a824-9a0f66873174 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.028128] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 772.028128] env[68279]: value = "task-2962886" [ 772.028128] env[68279]: _type = "Task" [ 772.028128] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.041580] env[68279]: DEBUG nova.compute.provider_tree [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.048877] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.294358] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522dcebe-254d-dedd-3bc9-19ee489be465, 'name': SearchDatastore_Task, 'duration_secs': 0.012035} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.295337] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d1332f9-a19a-47fd-b2b6-9a3718f1f0d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.301331] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 772.301331] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b9ebaa-40bd-535a-f3d2-78e2e76babee" [ 772.301331] env[68279]: _type = "Task" [ 772.301331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.309871] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b9ebaa-40bd-535a-f3d2-78e2e76babee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.339815] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172504} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.340124] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.340313] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.340483] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.352749] env[68279]: DEBUG oslo_vmware.api [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Task: {'id': task-2962885, 'name': PowerOnVM_Task, 'duration_secs': 0.457962} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.354971] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.355300] env[68279]: DEBUG nova.compute.manager [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.356141] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31e0e7d-af5c-4ec2-8764-8508cf6aef3a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.517128] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfe1fa46-d032-493e-b1a9-71e9ce45f501 req-3fd6c200-d46f-4939-9c72-e5c539fbc844 service nova] Releasing lock "refresh_cache-1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.539487] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962886, 'name': PowerOffVM_Task, 'duration_secs': 0.166326} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.539762] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.540447] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 772.541312] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224b56a7-c410-481c-a8c4-70b22c056ceb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.552322] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.552457] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-effbefac-a710-4918-b229-450ee4409810 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.584293] env[68279]: DEBUG nova.scheduler.client.report [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 772.584293] env[68279]: DEBUG nova.compute.provider_tree [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 68 to 69 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 772.584293] env[68279]: DEBUG nova.compute.provider_tree [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.590980] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.596230] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.596650] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Deleting the datastore file [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.597258] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35c5f49c-a126-431d-9cc1-a14875021068 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.605611] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 772.605611] env[68279]: value = "task-2962888" [ 772.605611] env[68279]: _type = "Task" [ 772.605611] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.615987] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962888, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.813725] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b9ebaa-40bd-535a-f3d2-78e2e76babee, 'name': SearchDatastore_Task, 'duration_secs': 0.014552} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.814017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.814291] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae/1a604a32-78c1-49cf-bafd-e1dc94c8b3ae.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.814563] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0e56fe3-8379-4c0e-8b51-1e7d50b9ae88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.823448] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 772.823448] env[68279]: value = "task-2962889" [ 772.823448] env[68279]: _type = "Task" [ 772.823448] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.835174] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.873823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.100476] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.848s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.100476] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 773.104954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.073s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.107425] env[68279]: INFO nova.compute.claims [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.126292] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168077} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.126653] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 773.126884] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 773.127276] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 773.342275] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962889, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.381152] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.381432] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.381637] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.381834] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.381984] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.385117] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.385378] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.385549] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.385755] env[68279]: DEBUG nova.virt.hardware [None 
req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.386163] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.386378] env[68279]: DEBUG nova.virt.hardware [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.387521] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817ca805-7ecf-4898-a2a4-c1fc062b93e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.397473] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d65c48-bd22-46df-afcf-0775176f1419 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.415594] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:cc:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab5d6d31-7d88-47ee-a53a-80e39c3e2a72', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.423535] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.423863] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.424225] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-677eb844-12bd-40c8-a4ca-3e530bea1c4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.445782] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.445782] env[68279]: value = "task-2962890" [ 773.445782] env[68279]: _type = "Task" [ 773.445782] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.458025] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962890, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.608071] env[68279]: DEBUG nova.compute.utils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 773.608343] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 773.608738] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 773.695323] env[68279]: DEBUG nova.policy [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88074da303124c9db173ac0c253f5c27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36fa09849bed42f69be37a023b710523', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.834257] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626674} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.834516] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae/1a604a32-78c1-49cf-bafd-e1dc94c8b3ae.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.834826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.835117] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9809333d-c1ea-4be3-802a-8c1d02a857b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.853451] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 773.853451] env[68279]: value = "task-2962891" [ 773.853451] env[68279]: _type = "Task" [ 773.853451] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.867144] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962891, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.958112] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962890, 'name': CreateVM_Task, 'duration_secs': 0.378161} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.958484] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.960967] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.960967] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.961177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.961404] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4807ae42-f9b8-4283-b910-d2e2d3827cc1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.968472] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 773.968472] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c03f2-2a93-82e6-7523-d15165fd0881" [ 773.968472] env[68279]: _type = "Task" [ 773.968472] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.984963] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c03f2-2a93-82e6-7523-d15165fd0881, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.000502] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Successfully created port: 14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.081034] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.081274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.112403] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 774.166697] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.166784] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.166897] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.167134] env[68279]: DEBUG nova.virt.hardware 
[None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.167291] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.167440] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.167643] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.167804] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.167971] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.168379] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.168618] env[68279]: DEBUG nova.virt.hardware [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.169769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd9ee80-54ba-44ba-85b4-73aa28b281ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.184179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adbc96b-8f1a-4603-b3c5-13f47e59cc62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.205734] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance VIF info [] {{(pid=68279) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.211627] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 774.218022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.218022] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b5fbc45-f886-4987-b7c6-ab63df40f1a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.236389] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.236389] env[68279]: value = "task-2962892" [ 774.236389] env[68279]: _type = "Task" [ 774.236389] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.245296] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962892, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.352533] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.352831] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.353041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.353159] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.353329] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.357939] env[68279]: INFO nova.compute.manager [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Terminating instance [ 774.370558] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122146} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.373692] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.375429] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae02baf-ba95-41a9-8c12-3b316326bd06 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.401675] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae/1a604a32-78c1-49cf-bafd-e1dc94c8b3ae.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.404906] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-892f2a73-e0fd-4d19-ac63-9f71d78cf066 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.442184] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 774.442184] env[68279]: value = "task-2962893" [ 774.442184] env[68279]: _type = "Task" [ 774.442184] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.452462] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962893, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.484591] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c03f2-2a93-82e6-7523-d15165fd0881, 'name': SearchDatastore_Task, 'duration_secs': 0.016229} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.487841] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.487939] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.488133] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.488280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.488454] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.489387] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2f0d508-4ea7-4f10-8e93-6afcd40f0f51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.500015] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.503228] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.506201] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a52a6a9-187a-4dfa-a9b3-c83be02615ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.513913] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 774.513913] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523b7770-fb61-8e52-9d28-aea27baaaca8" [ 774.513913] env[68279]: _type = "Task" [ 774.513913] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.525742] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523b7770-fb61-8e52-9d28-aea27baaaca8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.747059] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962892, 'name': CreateVM_Task, 'duration_secs': 0.444003} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.750462] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.750712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.750879] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.751231] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 774.751489] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9780b901-a117-4d20-9bd3-8a2dc5066d7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.757348] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 774.757348] env[68279]: 
value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52201532-970e-30bb-aeef-cfb1aa11a354" [ 774.757348] env[68279]: _type = "Task" [ 774.757348] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.766571] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52201532-970e-30bb-aeef-cfb1aa11a354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.844728] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e130715-083b-4425-b4d6-89f459b90562 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.854357] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0d2b19-868c-46ac-9094-1111fb47b7ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.889674] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "refresh_cache-a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.889889] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquired lock "refresh_cache-a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.890198] env[68279]: DEBUG nova.network.neutron [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.893443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaf0e13-7bc7-4828-9ee6-1e2d61a0b04c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.904370] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88920949-18a1-41ba-a5bd-359271bb09e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.921734] env[68279]: DEBUG nova.compute.provider_tree [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.955304] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962893, 'name': ReconfigVM_Task} 
progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.025715] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523b7770-fb61-8e52-9d28-aea27baaaca8, 'name': SearchDatastore_Task, 'duration_secs': 0.019486} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.026630] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733d78ed-8aea-4b65-a0c5-3939e4f856cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.035855] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 775.035855] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd1f4a-fe28-65c6-78da-d019e7dfd223" [ 775.035855] env[68279]: _type = "Task" [ 775.035855] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.045139] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd1f4a-fe28-65c6-78da-d019e7dfd223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.126687] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 775.155897] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:45:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1a90e579-85e0-4e3b-aa60-125e40db2a15',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1064338248',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 775.156205] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.156416] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 775.156722] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.156891] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 775.157104] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 775.157334] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 775.157493] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 775.157680] env[68279]: DEBUG nova.virt.hardware 
[None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 775.157905] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 775.158136] env[68279]: DEBUG nova.virt.hardware [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 775.159120] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3849ba6-addc-4522-adf3-0ca6932ba2a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.169069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b0ddeb-c637-4746-bc3e-1b94e3390f31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.271070] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52201532-970e-30bb-aeef-cfb1aa11a354, 'name': SearchDatastore_Task, 'duration_secs': 0.014826} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.271233] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.271544] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.271833] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.272015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.272947] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 775.272947] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-019f98f3-2554-4a84-a51e-5ad7c053bfc8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.284226] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.284444] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.285284] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70dd967c-95c0-4111-827c-cf5ba6f0e9fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.291978] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 775.291978] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e9bbca-3d82-8e79-00b2-f4f66da8e7b4" [ 775.291978] env[68279]: _type = "Task" [ 775.291978] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.302999] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e9bbca-3d82-8e79-00b2-f4f66da8e7b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.413093] env[68279]: DEBUG nova.compute.manager [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Received event network-vif-plugged-14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.413582] env[68279]: DEBUG oslo_concurrency.lockutils [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] Acquiring lock "665d932d-1068-4bb2-835c-2184a80753d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.413789] env[68279]: DEBUG oslo_concurrency.lockutils [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.413957] env[68279]: DEBUG oslo_concurrency.lockutils [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.414411] env[68279]: DEBUG nova.compute.manager [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] No waiting events found dispatching network-vif-plugged-14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 775.414598] env[68279]: WARNING nova.compute.manager [req-9c0619b5-a99b-4300-bbbf-9936547eb444 req-373f18dd-c784-4d16-a838-3e74d2d3b1d5 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Received unexpected event 
network-vif-plugged-14d0066b-e387-4f2f-a12a-c40206f0b1d0 for instance with vm_state building and task_state spawning. [ 775.416145] env[68279]: DEBUG nova.network.neutron [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.425063] env[68279]: DEBUG nova.scheduler.client.report [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.455618] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962893, 'name': ReconfigVM_Task, 'duration_secs': 0.530604} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.455618] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae/1a604a32-78c1-49cf-bafd-e1dc94c8b3ae.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.455807] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c78981a-aca3-4c67-895c-94cef148f902 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.464183] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 775.464183] env[68279]: value = "task-2962894" [ 775.464183] env[68279]: _type = "Task" [ 775.464183] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.474036] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962894, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.505795] env[68279]: DEBUG nova.network.neutron [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.530078] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Successfully updated port: 14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.546630] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd1f4a-fe28-65c6-78da-d019e7dfd223, 'name': SearchDatastore_Task, 'duration_secs': 0.018385} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.546907] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.547200] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.547498] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a22239c3-b596-4ecb-9501-89a20acc903e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.557346] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 775.557346] env[68279]: value = "task-2962895" [ 775.557346] env[68279]: _type = "Task" [ 775.557346] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.567661] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962895, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.803399] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e9bbca-3d82-8e79-00b2-f4f66da8e7b4, 'name': SearchDatastore_Task, 'duration_secs': 0.015236} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.804138] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3df13844-907d-4faa-b841-a6c1614dca2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.810462] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 775.810462] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dae27a-6fe8-9f7e-d505-6d56edc75d63" [ 775.810462] env[68279]: _type = "Task" [ 775.810462] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.819773] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dae27a-6fe8-9f7e-d505-6d56edc75d63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.930224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.825s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.930819] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 775.933781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.660s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.935667] env[68279]: INFO nova.compute.claims [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.977838] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962894, 'name': Rename_Task, 'duration_secs': 0.221035} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.978215] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.978483] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07f49a24-3ea9-4772-9f7a-a9576d995584 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.988039] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 775.988039] env[68279]: value = "task-2962896" [ 775.988039] env[68279]: _type = "Task" [ 775.988039] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.998898] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.009145] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Releasing lock "refresh_cache-a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.009601] env[68279]: DEBUG nova.compute.manager [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 776.009828] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 776.010741] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acbfed2-d9a3-4ad4-9a70-ac258f24781a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.019989] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 776.021166] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb463328-c8d9-49d4-93da-56f79ba3fe26 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.030208] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 776.030208] env[68279]: value = "task-2962897" [ 776.030208] env[68279]: _type = "Task" [ 776.030208] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.035023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.035204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.035355] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.043063] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.069021] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962895, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.322947] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dae27a-6fe8-9f7e-d505-6d56edc75d63, 'name': SearchDatastore_Task, 'duration_secs': 0.015987} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.323274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.323590] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.323889] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0f4ec4e-cd2e-4656-a2d2-dafb1654f593 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.332532] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 776.332532] env[68279]: value = "task-2962898" [ 776.332532] env[68279]: _type = "Task" [ 776.332532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.343092] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962898, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.442927] env[68279]: DEBUG nova.compute.utils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 776.444453] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 776.444666] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.483587] env[68279]: DEBUG nova.policy [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98ee85328964497482886fb16ff6e25f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '182c7f7affa443dba0ce3affd30eed42', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 776.499238] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962896, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.543073] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962897, 'name': PowerOffVM_Task, 'duration_secs': 0.255766} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.543528] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.543644] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 776.544204] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c97706fd-ea8d-477d-8a67-13785b2e24b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.569298] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652192} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.569585] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.569801] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.570254] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a891667-65fd-4625-a565-abd0f6f88430 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.572732] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.578150] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 776.578352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 776.580572] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Deleting the datastore file [datastore1] a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.580572] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bba09bf-8543-4319-98b7-153583fe09d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.582845] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 776.582845] env[68279]: value = "task-2962900" [ 776.582845] env[68279]: _type = "Task" [ 776.582845] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.587677] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for the task: (returnval){ [ 776.587677] env[68279]: value = "task-2962901" [ 776.587677] env[68279]: _type = "Task" [ 776.587677] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.595615] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962900, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.602157] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.818787] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Successfully created port: d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.829468] env[68279]: DEBUG nova.network.neutron [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.845689] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962898, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.949310] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.007021] env[68279]: DEBUG oslo_vmware.api [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962896, 'name': PowerOnVM_Task, 'duration_secs': 0.782405} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.007021] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.007751] env[68279]: INFO nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Took 8.30 seconds to spawn the instance on the hypervisor. [ 777.008139] env[68279]: DEBUG nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.009791] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd7c594-3338-4c89-8899-ee997e584524 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.099240] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081532} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.100145] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.103768] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f829ace-3784-4651-bd27-3a5b9444cdb4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.106644] env[68279]: DEBUG oslo_vmware.api [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Task: {'id': task-2962901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213882} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.107262] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 777.107411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 777.107581] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 777.107777] env[68279]: INFO nova.compute.manager [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Took 1.10 seconds to destroy the instance on the hypervisor. [ 777.108021] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.108210] env[68279]: DEBUG nova.compute.manager [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 777.108299] env[68279]: DEBUG nova.network.neutron [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.128051] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.131237] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bfb4a60-e488-4bd5-8750-8cf8a37a49d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.150777] env[68279]: DEBUG nova.network.neutron [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.154450] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 777.154450] env[68279]: value = "task-2962902" [ 777.154450] env[68279]: _type = "Task" [ 777.154450] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.169658] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962902, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.333653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.333653] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Instance network_info: |[{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.334145] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:9f:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14d0066b-e387-4f2f-a12a-c40206f0b1d0', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.342077] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f2938f4-5fec-4465-806d-2ac168985d49 
tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.349656] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.350344] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00f4272d-93ff-46ee-8139-711b5d9743a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.374143] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666238} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.374450] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.374711] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.376287] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f5ee5f8-4b6d-4fed-859d-3e8558986300 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.378079] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.378079] env[68279]: value = "task-2962903" [ 777.378079] env[68279]: _type = "Task" [ 777.378079] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.386252] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 777.386252] env[68279]: value = "task-2962904" [ 777.386252] env[68279]: _type = "Task" [ 777.386252] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.392462] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962903, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.398254] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962904, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.445124] env[68279]: DEBUG nova.compute.manager [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Received event network-changed-14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.445392] env[68279]: DEBUG nova.compute.manager [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Refreshing instance network info cache due to event network-changed-14d0066b-e387-4f2f-a12a-c40206f0b1d0. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 777.445538] env[68279]: DEBUG oslo_concurrency.lockutils [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.445678] env[68279]: DEBUG oslo_concurrency.lockutils [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.445836] env[68279]: DEBUG nova.network.neutron [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Refreshing network info cache for port 14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.529760] env[68279]: INFO nova.compute.manager [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Took 33.26 seconds to build instance. 
[ 777.586234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d8a2b0-65a7-473a-8131-6d92e1770608 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.594637] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a611f2e-bbe0-4a7e-9bc6-e86be4b0b31b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.629696] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a39509-f6b0-4ce6-af0b-725003fdbee8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.638551] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eda64c0-bd24-4b06-bb1b-db58dce21c1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.653773] env[68279]: DEBUG nova.compute.provider_tree [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.655371] env[68279]: DEBUG nova.network.neutron [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.666521] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962902, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.888398] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962903, 'name': CreateVM_Task, 'duration_secs': 0.491544} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.891475] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.892201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.892369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.892696] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.893319] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2ff969-34a8-4996-9280-205e239be199 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.898112] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962904, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14496} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.899406] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.899748] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 777.899748] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277f9fb-5977-4868-1780-d06c9ed15ea4" [ 777.899748] env[68279]: _type = "Task" [ 777.899748] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.900443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f832e337-3d9a-4a93-b8f0-53e7d44a571e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.912759] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277f9fb-5977-4868-1780-d06c9ed15ea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.930264] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.930970] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dddafb50-3690-413d-93f5-089ccd89e0cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.956956] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 777.956956] env[68279]: value = "task-2962905" [ 777.956956] env[68279]: _type = "Task" [ 777.956956] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.961738] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 777.971830] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962905, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.989016] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.989257] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.989410] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.989585] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.989751] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.989934] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.990181] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.990347] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.990522] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.990670] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.990842] env[68279]: DEBUG nova.virt.hardware [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.992065] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c1283e-fd55-47af-9698-c6d59f0840fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.004896] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 778.006311] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e667bbab-60a2-4f79-b964-8ca0aa4b0302 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.010945] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b778a11e-4abf-4f33-bbe7-69f5d783d50c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.017471] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 778.017632] env[68279]: ERROR oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk due to incomplete transfer. 
[ 778.025431] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-58d3bb89-5b5a-4de1-bd05-9f8f160dce62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.032114] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fe3663e9-d16a-4f74-be28-94f41988679b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.527s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.035388] env[68279]: DEBUG oslo_vmware.rw_handles [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ba757c-a700-2dba-fd24-e3cede62e06c/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 778.035644] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Uploaded image b217a09c-2e46-46c6-8837-a229cfd46700 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 778.038015] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 778.038254] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-737a1a4b-6c28-405b-aada-be531e4b40e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.046452] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 778.046452] env[68279]: value = "task-2962906" [ 778.046452] env[68279]: _type = "Task" [ 778.046452] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.055544] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962906, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.163239] env[68279]: INFO nova.compute.manager [-] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Took 1.05 seconds to deallocate network for instance. [ 778.176688] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962902, 'name': ReconfigVM_Task, 'duration_secs': 0.561092} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.177715] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.180162] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d98ae8b-4f63-4c29-9b87-f12c334316bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.182543] env[68279]: ERROR nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [req-46eaf8d8-0c71-4c9c-b997-2342d1565a2d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-46eaf8d8-0c71-4c9c-b997-2342d1565a2d"}]} [ 778.196252] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 778.196252] env[68279]: value = "task-2962907" [ 778.196252] env[68279]: _type = "Task" [ 778.196252] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.215023] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962907, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.215023] env[68279]: DEBUG nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 778.234343] env[68279]: DEBUG nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 778.234592] env[68279]: DEBUG nova.compute.provider_tree [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 778.255585] env[68279]: DEBUG nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 778.277317] env[68279]: DEBUG nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 778.358988] env[68279]: DEBUG nova.network.neutron [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updated VIF entry in instance network info cache for port 14d0066b-e387-4f2f-a12a-c40206f0b1d0. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.359346] env[68279]: DEBUG nova.network.neutron [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.421343] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5277f9fb-5977-4868-1780-d06c9ed15ea4, 'name': SearchDatastore_Task, 'duration_secs': 0.027591} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.424602] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.424844] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.425098] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.425249] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.425427] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.425921] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46bc773c-032b-4e33-9514-36e8325d0a7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.442490] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.442717] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.443478] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59785f8e-6d7c-4350-a1bb-063b50445b42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.452611] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 778.452611] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528c8f6f-a396-7bbe-f3a0-a013dbbd18da" [ 778.452611] env[68279]: _type = "Task" [ 778.452611] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.453149] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.453282] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.453501] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.453653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.453821] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.456122] env[68279]: INFO nova.compute.manager [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Terminating instance [ 778.478021] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 
tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528c8f6f-a396-7bbe-f3a0-a013dbbd18da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.479023] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962905, 'name': ReconfigVM_Task, 'duration_secs': 0.513461} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.480362] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7/43f629d6-bdc3-4345-97ec-26ce2c9d7be7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.480681] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7279017-e8fa-4f48-af90-cf4275a1766b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.484032] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Successfully updated port: d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.494803] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 778.494803] env[68279]: value = "task-2962908" [ 778.494803] env[68279]: _type = "Task" [ 778.494803] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.507279] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962908, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.536450] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.564446] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962906, 'name': Destroy_Task, 'duration_secs': 0.332013} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.564920] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Destroyed the VM [ 778.565124] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 778.566026] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cad7d0db-862c-4e6b-8a6f-f34afeab361c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.582416] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 778.582416] env[68279]: value = "task-2962909" [ 778.582416] env[68279]: _type = "Task" [ 778.582416] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.598695] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962909, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.681632] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.714464] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962907, 'name': Rename_Task, 'duration_secs': 0.148649} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.714464] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.714830] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffbf96a3-5db1-4c28-8ce6-ef6e2fc7bf58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.725114] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 778.725114] env[68279]: value = "task-2962910" [ 778.725114] env[68279]: _type = "Task" [ 778.725114] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.737061] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.861810] env[68279]: DEBUG oslo_concurrency.lockutils [req-63b82061-d08a-4ce1-8f42-8ea13246c08f req-14ed8438-f0e1-41e0-abb7-4919956b0215 service nova] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.884251] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04186dfb-027f-47cf-9b83-f301e7527d45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.893030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b50045-c059-45e4-872c-90fc72186de1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.928017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83505e12-8a6a-4fb7-b0fa-604ea9b7f10a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.937055] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149d972e-73e3-4b3e-a02f-3d4b9e9722ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.952792] env[68279]: DEBUG nova.compute.provider_tree [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.960736] env[68279]: DEBUG nova.compute.manager [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 778.960997] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.961847] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caa267c-49a5-4fa8-a8c0-6c41ad7a4b56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.969515] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528c8f6f-a396-7bbe-f3a0-a013dbbd18da, 'name': SearchDatastore_Task, 'duration_secs': 0.052076} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.971040] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-933f7ac5-22a2-459b-a61c-1321d0aeafde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.976216] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.976342] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57079a99-5c29-40e5-b146-27935a92e674 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.979549] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 778.979549] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297074a-a079-5956-ee78-bf3141f94e01" [ 778.979549] env[68279]: _type = "Task" [ 778.979549] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.986034] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 778.986034] env[68279]: value = "task-2962911" [ 778.986034] env[68279]: _type = "Task" [ 778.986034] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.991233] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.991233] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.991391] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.993332] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297074a-a079-5956-ee78-bf3141f94e01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.003168] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.012274] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962908, 'name': Rename_Task, 'duration_secs': 0.149642} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.012274] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.012274] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-633d0f45-b499-473a-9d71-2445282b30d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.020784] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 779.020784] env[68279]: value = "task-2962912" [ 779.020784] env[68279]: _type = "Task" [ 779.020784] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.030762] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.062457] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.099397] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962909, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.236311] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962910, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.459996] env[68279]: DEBUG nova.scheduler.client.report [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.494112] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297074a-a079-5956-ee78-bf3141f94e01, 'name': SearchDatastore_Task, 'duration_secs': 0.019565} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.494636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.494847] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.497644] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a74355e-2fd3-4812-ab8a-bb4bb0acd249 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.503606] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962911, 'name': PowerOffVM_Task, 'duration_secs': 0.241909} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.504428] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.504597] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.504836] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d82ecce-ae12-46dd-af00-8f875eaaffbb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.509749] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 779.509749] env[68279]: value = "task-2962913" [ 779.509749] env[68279]: _type = "Task" [ 779.509749] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.517950] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962913, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.532265] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.533428] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.553707] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.554105] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.554382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.554566] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.554763] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.557157] env[68279]: INFO nova.compute.manager [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Terminating instance [ 779.575945] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 
1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.576323] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.576603] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore1] 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.576979] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5d3a21a-1b9a-4160-aca3-a720564ef68e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.588328] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 779.588328] env[68279]: value = "task-2962915" [ 779.588328] env[68279]: _type = "Task" [ 779.588328] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.610881] env[68279]: DEBUG oslo_vmware.api [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962909, 'name': RemoveSnapshot_Task, 'duration_secs': 0.651195} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.614508] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.614508] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 779.614508] env[68279]: INFO nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Took 13.36 seconds to snapshot the instance on the hypervisor. 
[ 779.625911] env[68279]: DEBUG nova.compute.manager [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Received event network-vif-plugged-d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.626182] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Acquiring lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.626408] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.626621] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.627551] env[68279]: DEBUG nova.compute.manager [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] No waiting events found dispatching network-vif-plugged-d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.627551] env[68279]: WARNING nova.compute.manager [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Received unexpected event network-vif-plugged-d8067a6f-39fd-42be-8f8e-23d5dea92c8b for instance with vm_state building and task_state spawning. [ 779.627551] env[68279]: DEBUG nova.compute.manager [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Received event network-changed-d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.627551] env[68279]: DEBUG nova.compute.manager [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Refreshing instance network info cache due to event network-changed-d8067a6f-39fd-42be-8f8e-23d5dea92c8b. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.627551] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Acquiring lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.722871] env[68279]: DEBUG nova.network.neutron [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [{"id": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "address": "fa:16:3e:ae:b4:7f", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8067a6f-39", "ovs_interfaceid": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.739685] env[68279]: DEBUG oslo_vmware.api [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962910, 'name': PowerOnVM_Task, 'duration_secs': 0.578306} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.740018] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.740275] env[68279]: DEBUG nova.compute.manager [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.741117] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56bec8e-ae82-4d69-bf71-64cecb1e170b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.965648] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.032s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.966254] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 779.969129] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.323s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.972512] env[68279]: INFO nova.compute.claims [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.024013] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493448} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.028824] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.030068] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.030516] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5afaeccf-69ec-4d0d-bd9c-bfa0f6f2fd75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.042530] env[68279]: DEBUG oslo_vmware.api [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962912, 'name': PowerOnVM_Task, 'duration_secs': 0.622947} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.044352] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.044650] env[68279]: DEBUG nova.compute.manager [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.045515] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 780.045515] env[68279]: value = "task-2962916" [ 780.045515] env[68279]: _type = "Task" [ 780.045515] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.046595] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ee4a40-0afc-4764-ba08-2156dfd3c025 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.062590] env[68279]: DEBUG nova.compute.manager [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 780.062902] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.063721] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962916, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.066719] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b96656-35d5-446f-b35f-2ed6413eea5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.081418] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.081801] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4510d5ce-893e-49a5-a98b-ab0623d253c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.106252] env[68279]: DEBUG oslo_vmware.api [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2962915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190476} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.106652] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.106925] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.107262] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.107747] env[68279]: INFO nova.compute.manager [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 780.107894] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.109800] env[68279]: DEBUG nova.compute.manager [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 780.109943] env[68279]: DEBUG nova.network.neutron [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.117576] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "67466e30-5944-490c-a89b-2d32c59525be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.117923] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.118552] env[68279]: DEBUG nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance disappeared during snapshot {{(pid=68279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 780.132386] env[68279]: DEBUG nova.compute.manager [None req-1fb05865-edb0-4db7-88e2-37db0f5ab954 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image not found during clean up b217a09c-2e46-46c6-8837-a229cfd46700 {{(pid=68279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 780.190727] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.190947] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.191145] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore2] 01a624d3-782d-44cf-8a4e-05a85ac91c64 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
780.191413] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa98a22a-7b6a-454a-ac94-f773b2cf1587 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.201328] env[68279]: DEBUG oslo_vmware.api [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 780.201328] env[68279]: value = "task-2962918" [ 780.201328] env[68279]: _type = "Task" [ 780.201328] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.209754] env[68279]: DEBUG oslo_vmware.api [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.225854] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.226244] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Instance network_info: |[{"id": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "address": "fa:16:3e:ae:b4:7f", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8067a6f-39", "ovs_interfaceid": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 780.226640] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Acquired lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.226872] env[68279]: DEBUG nova.network.neutron [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Refreshing 
network info cache for port d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.228152] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:b4:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8067a6f-39fd-42be-8f8e-23d5dea92c8b', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.235987] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating folder: Project (182c7f7affa443dba0ce3affd30eed42). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.239349] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab26f3ce-09e5-4e35-9bb3-49fe354317ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.259753] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created folder: Project (182c7f7affa443dba0ce3affd30eed42) in parent group-v594445. [ 780.260348] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating folder: Instances. Parent ref: group-v594583. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.260578] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e1698e7-3288-49b7-929c-806b18b0d6ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.264948] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.275484] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created folder: Instances in parent group-v594583. [ 780.275909] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.276258] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.276590] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdf7bbc8-6e6f-4bc6-8d32-0d8d87ac93ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.298992] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.298992] env[68279]: value = "task-2962921" [ 780.298992] env[68279]: _type = "Task" [ 780.298992] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.309467] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962921, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.477909] env[68279]: DEBUG nova.compute.utils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 780.483237] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 780.483237] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 780.527163] env[68279]: DEBUG nova.policy [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98ee85328964497482886fb16ff6e25f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '182c7f7affa443dba0ce3affd30eed42', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 780.563934] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101431} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.564242] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.565084] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5973c3-c2fd-46a4-9546-c3f86c066a7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.593630] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.595707] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e80580bc-f3f9-4d60-b6b2-0c92eb34834c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.613038] env[68279]: DEBUG nova.network.neutron [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updated VIF entry in instance network info cache for port d8067a6f-39fd-42be-8f8e-23d5dea92c8b. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.613038] env[68279]: DEBUG nova.network.neutron [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [{"id": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "address": "fa:16:3e:ae:b4:7f", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8067a6f-39", "ovs_interfaceid": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.614422] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.622481] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 780.622481] env[68279]: value = "task-2962922" [ 780.622481] env[68279]: _type = "Task" [ 780.622481] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.631751] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962922, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.712683] env[68279]: DEBUG oslo_vmware.api [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2962918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199771} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.713177] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.713278] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.713884] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.713884] env[68279]: INFO nova.compute.manager [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Took 0.65 seconds to destroy the instance on the hypervisor. [ 780.714015] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.714242] env[68279]: DEBUG nova.compute.manager [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 780.714365] env[68279]: DEBUG nova.network.neutron [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.804856] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Successfully created port: d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.810936] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962921, 'name': CreateVM_Task, 'duration_secs': 0.414265} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.811167] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.812013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.812013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.812370] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 780.812672] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-803a0185-5a02-4ab6-88fd-b11cacc17d49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.818896] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 780.818896] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d4fff-2cd0-00cf-8273-67078d896121" [ 780.818896] env[68279]: _type = "Task" [ 780.818896] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.828103] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d4fff-2cd0-00cf-8273-67078d896121, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.875831] env[68279]: DEBUG nova.network.neutron [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.986314] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.117241] env[68279]: DEBUG oslo_concurrency.lockutils [req-600ec986-d415-4906-a566-b192d6335a3b req-da79d2eb-4306-45ec-a2b4-5d1556f03882 service nova] Releasing lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.136753] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962922, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.297110] env[68279]: DEBUG nova.compute.manager [req-a69649b8-e82e-4368-9805-f0b3898e3290 req-9a81f3e5-cff5-494c-9ff8-3ef2af4ee9d8 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Received event network-vif-deleted-f28928fb-b9ce-40c3-87b6-80cb1360cef6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.297314] env[68279]: INFO nova.compute.manager [req-a69649b8-e82e-4368-9805-f0b3898e3290 req-9a81f3e5-cff5-494c-9ff8-3ef2af4ee9d8 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Neutron deleted interface f28928fb-b9ce-40c3-87b6-80cb1360cef6; detaching it from the instance and deleting it from the info cache [ 781.297485] env[68279]: DEBUG nova.network.neutron [req-a69649b8-e82e-4368-9805-f0b3898e3290 req-9a81f3e5-cff5-494c-9ff8-3ef2af4ee9d8 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.330791] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d4fff-2cd0-00cf-8273-67078d896121, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.333466] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.333705] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.333935] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.334099] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.334282] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.334941] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-111ccd51-6a6e-490e-bc01-95a63ab66474 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.349010] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.349010] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.349321] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d54abb-fa62-4d7c-9d1c-4c5e1cf37032 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.358186] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 781.358186] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c008e-7016-c37e-48f3-941a446fe0a1" [ 781.358186] env[68279]: _type = "Task" [ 781.358186] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.366686] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c008e-7016-c37e-48f3-941a446fe0a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.382039] env[68279]: INFO nova.compute.manager [-] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Took 1.27 seconds to deallocate network for instance. [ 781.511068] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201c8828-a7f6-405f-af7e-8ef0042fe14c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.519152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77db0d7c-88bb-4030-a4bc-83acc98b2b75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.524287] env[68279]: INFO nova.compute.manager [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Rebuilding instance [ 781.554947] env[68279]: DEBUG nova.network.neutron [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.559027] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd79c19-37d1-4508-a749-e6f432f2be57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.571682] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c94a50-9634-4e3f-a48d-0fa9abad1c6c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.586617] env[68279]: DEBUG nova.compute.provider_tree [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.592090] env[68279]: DEBUG nova.compute.manager [None 
req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.592874] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b74d930-490d-4125-8c2c-057846a5477d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.634032] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962922, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.654328] env[68279]: DEBUG nova.compute.manager [req-0e559a2e-6278-40da-b69c-667b5f59e444 req-7c98f386-9102-4f5f-b14e-7df609e3d007 service nova] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Received event network-vif-deleted-1692e61e-7fc8-48fb-bed2-cb281c3b64e6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.799933] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b535b367-ddb9-42bc-baf5-957a9f78c0a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.810178] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c96d0a7-2dff-4762-bfc1-c4b408880f29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.850524] env[68279]: DEBUG nova.compute.manager [req-a69649b8-e82e-4368-9805-f0b3898e3290 req-9a81f3e5-cff5-494c-9ff8-3ef2af4ee9d8 service nova] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Detach interface failed, port_id=f28928fb-b9ce-40c3-87b6-80cb1360cef6, reason: Instance 01a624d3-782d-44cf-8a4e-05a85ac91c64 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 781.870369] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c008e-7016-c37e-48f3-941a446fe0a1, 'name': SearchDatastore_Task, 'duration_secs': 0.014254} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.870369] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570a1c2c-89b9-4b98-82ee-8b4a51b19246 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.875608] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 781.875608] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5257ee39-9874-8446-8b98-5d7a2d12cf28" [ 781.875608] env[68279]: _type = "Task" [ 781.875608] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.884717] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5257ee39-9874-8446-8b98-5d7a2d12cf28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.890825] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.922518] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.922715] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.923055] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.923154] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.923336] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.927036] env[68279]: INFO nova.compute.manager [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Terminating instance [ 781.995393] 
env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.023472] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.023733] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.023893] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.024107] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.024261] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.024412] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.024633] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} 
[ 782.024794] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.024976] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.025155] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.025327] env[68279]: DEBUG nova.virt.hardware [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.026219] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc5cc3b-4e6b-4071-b579-769b215d3fb4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.035792] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7488a729-2336-480a-8979-e5de642b4749 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.063223] env[68279]: INFO nova.compute.manager [-] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Took 1.35 seconds to deallocate network for instance. [ 782.091767] env[68279]: DEBUG nova.scheduler.client.report [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.134206] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962922, 'name': ReconfigVM_Task, 'duration_secs': 1.099018} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.134533] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.135336] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31b82198-d695-4eb3-8716-2267ac520aab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.143173] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 782.143173] env[68279]: value = "task-2962923" [ 782.143173] env[68279]: _type = "Task" [ 782.143173] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.152788] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962923, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.385040] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Successfully updated port: d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.389085] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5257ee39-9874-8446-8b98-5d7a2d12cf28, 'name': SearchDatastore_Task, 'duration_secs': 0.010694} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.389950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.390082] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/fe92e176-222c-4c46-a254-1c12e21c68d0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.391473] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97e48acf-601b-4bd1-9030-251c8811fb90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.399772] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 782.399772] env[68279]: value = "task-2962924" [ 782.399772] env[68279]: _type = "Task" [ 782.399772] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.409569] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962924, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.430803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "refresh_cache-43f629d6-bdc3-4345-97ec-26ce2c9d7be7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.431153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquired lock "refresh_cache-43f629d6-bdc3-4345-97ec-26ce2c9d7be7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.431390] env[68279]: DEBUG nova.network.neutron [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.569944] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.597477] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.598032] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 782.600848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.164s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.601073] env[68279]: DEBUG nova.objects.instance [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lazy-loading 'resources' on Instance uuid 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.608675] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 782.609015] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0db69cb2-ae20-483f-a693-415e778bfdf5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.619310] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 782.619310] env[68279]: value = "task-2962925" [ 782.619310] env[68279]: _type = "Task" [ 782.619310] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.630201] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962925, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.655628] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962923, 'name': Rename_Task, 'duration_secs': 0.151661} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.656021] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.656328] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ca356a9-165c-4967-8868-2035cc04e3d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.664870] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 782.664870] env[68279]: value = "task-2962926" [ 782.664870] env[68279]: _type = "Task" [ 782.664870] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.675190] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.890356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.890569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.890762] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.911081] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482136} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.911369] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/fe92e176-222c-4c46-a254-1c12e21c68d0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.911581] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.912134] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcdb4881-14b1-4c02-8338-89580af1b50d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.920621] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 782.920621] env[68279]: value = "task-2962927" [ 782.920621] env[68279]: _type = "Task" [ 782.920621] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.930773] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.950972] env[68279]: DEBUG nova.network.neutron [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.003060] env[68279]: DEBUG nova.network.neutron [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.106541] env[68279]: DEBUG nova.compute.utils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.107885] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 783.108080] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.132201] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962925, 'name': PowerOffVM_Task, 'duration_secs': 0.282468} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.134889] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 783.135128] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.136094] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5897f0e8-99d3-458f-a573-8cd59d182606 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.144300] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 783.145030] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-122e9cbf-ea0f-467f-8f21-236fede61495 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.147348] env[68279]: DEBUG nova.policy [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82d3c8df920f438fbb8d8a512608345d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2da59283302649f8a93ac83ac3c1d1eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 783.175571] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962926, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.231517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 783.231653] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 783.231762] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore1] 50e08259-7915-49bb-b137-5cc6e9d53c16 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 783.232114] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9abc46ea-6fee-465d-8833-2eb693dd81cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.242575] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 783.242575] env[68279]: value = "task-2962929" [ 783.242575] env[68279]: _type = "Task" [ 783.242575] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.253668] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962929, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.423427] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.429685] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Successfully created port: 50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.436769] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074029} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.439290] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.440294] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71514ce9-73a9-4c39-9e7d-24cd11e7cf7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.464610] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/fe92e176-222c-4c46-a254-1c12e21c68d0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.469613] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0d80698-998b-496c-a5b8-773a79edbf34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.490033] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 783.490033] env[68279]: value = "task-2962930" [ 783.490033] env[68279]: _type = "Task" [ 783.490033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.499497] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962930, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.505165] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Releasing lock "refresh_cache-43f629d6-bdc3-4345-97ec-26ce2c9d7be7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.505569] env[68279]: DEBUG nova.compute.manager [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 783.505757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.506633] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd6aba9-6457-46d0-8427-f1102a49c018 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.514954] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.515395] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c14efa72-d42a-469a-9bf8-110ccb42d14f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.526313] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 783.526313] env[68279]: value = "task-2962931" [ 783.526313] env[68279]: _type = "Task" [ 783.526313] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.536498] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.611376] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 783.678528] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962926, 'name': PowerOnVM_Task} progress is 96%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.701669] env[68279]: DEBUG nova.network.neutron [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updating instance_info_cache with network_info: [{"id": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "address": "fa:16:3e:35:36:53", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0861d0a-53", "ovs_interfaceid": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.717170] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d3cfa5-3af3-4859-97b8-f3ce746f3c47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.725551] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44188f84-b34c-491f-805e-22fc0b8043c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.767623] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2c2eb1-d179-4d5d-8ddd-c5fb8e1960ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.781603] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc634d51-dff8-4384-bec6-232257bfc7c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.785912] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187705} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.786195] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 783.786382] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 783.786576] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.800344] env[68279]: DEBUG nova.compute.provider_tree [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.845100] env[68279]: DEBUG nova.compute.manager [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Received event network-vif-plugged-d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.845151] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Acquiring lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.845473] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.845651] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.845708] env[68279]: DEBUG nova.compute.manager [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] No waiting events found dispatching network-vif-plugged-d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.845823] env[68279]: WARNING nova.compute.manager [req-5f39ab65-cb03-431c-b7af-e45727cd6217 
req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Received unexpected event network-vif-plugged-d0861d0a-53ee-41f0-b051-d3c2f213a4a7 for instance with vm_state building and task_state spawning. [ 783.845979] env[68279]: DEBUG nova.compute.manager [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Received event network-changed-d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.846141] env[68279]: DEBUG nova.compute.manager [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Refreshing instance network info cache due to event network-changed-d0861d0a-53ee-41f0-b051-d3c2f213a4a7. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.846305] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Acquiring lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.000654] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962930, 'name': ReconfigVM_Task, 'duration_secs': 0.299201} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.000969] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfigured VM instance instance-0000002f to attach disk [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/fe92e176-222c-4c46-a254-1c12e21c68d0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.001687] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b3fa3d9-e1e3-466d-b796-f32262efbcb8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.010034] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 784.010034] env[68279]: value = "task-2962932" [ 784.010034] env[68279]: _type = "Task" [ 784.010034] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.018170] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962932, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.036333] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962931, 'name': PowerOffVM_Task, 'duration_secs': 0.127909} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.036622] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.036802] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.037066] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d08255c-6aba-4b53-9203-a8c3f4f295d7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.067401] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.067642] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.067859] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Deleting the datastore file [datastore2] 43f629d6-bdc3-4345-97ec-26ce2c9d7be7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.068160] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23330ef2-4317-44e9-bf9c-99d9d0afaa3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.075529] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for the task: (returnval){ [ 784.075529] env[68279]: value = "task-2962934" [ 784.075529] env[68279]: _type = "Task" [ 784.075529] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.085056] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.176324] env[68279]: DEBUG oslo_vmware.api [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2962926, 'name': PowerOnVM_Task, 'duration_secs': 1.139518} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.176607] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.176776] env[68279]: INFO nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Took 9.05 seconds to spawn the instance on the hypervisor. [ 784.176963] env[68279]: DEBUG nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 784.177716] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdabc978-ae28-4f7a-8f54-7d5c2d142441 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.206316] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.206316] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Instance network_info: |[{"id": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "address": "fa:16:3e:35:36:53", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0861d0a-53", "ovs_interfaceid": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.206316] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Acquired lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.206316] env[68279]: DEBUG nova.network.neutron [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Refreshing network info cache for port d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.206316] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:36:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0861d0a-53ee-41f0-b051-d3c2f213a4a7', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.215607] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.216688] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.217024] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8cddc5c4-5bb2-4a48-8dff-cf0ad32f6126 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.241766] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.241766] env[68279]: value = "task-2962935" [ 784.241766] env[68279]: _type = "Task" [ 784.241766] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.250527] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962935, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.303524] env[68279]: DEBUG nova.scheduler.client.report [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.520475] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962932, 'name': Rename_Task, 'duration_secs': 0.153623} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.520745] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 784.520985] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e62322a5-d5b6-4064-8f0f-488ec3bf85af {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.527545] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 784.527545] env[68279]: value = "task-2962936" [ 784.527545] env[68279]: _type = "Task" [ 784.527545] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.535756] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.585933] env[68279]: DEBUG oslo_vmware.api [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Task: {'id': task-2962934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116665} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.586213] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.586398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.586637] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.586838] env[68279]: INFO nova.compute.manager [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Took 1.08 seconds to destroy the instance on the hypervisor. [ 784.587091] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.587290] env[68279]: DEBUG nova.compute.manager [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.587386] env[68279]: DEBUG nova.network.neutron [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.603306] env[68279]: DEBUG nova.network.neutron [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.621988] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 784.647745] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 784.648105] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.648330] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 784.648527] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.648739] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 784.648903] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 784.649125] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 784.649288] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 784.649531] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 
tempest-ServerTagsTestJSON-461736765-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 784.649652] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 784.649875] env[68279]: DEBUG nova.virt.hardware [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 784.650767] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8284f39a-8949-4603-8cc6-94f43eda7e56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.659930] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3e2f8a-8ec2-4cd6-833c-a5ab6c6a7f50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.695269] env[68279]: INFO nova.compute.manager [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Took 39.15 seconds to build instance. [ 784.754070] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962935, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.809242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.208s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.812195] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.519s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.813728] env[68279]: INFO nova.compute.claims [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.836019] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 784.836375] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.837066] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 784.837066] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.837066] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 784.837066] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 784.837390] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 784.837523] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 784.837848] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 784.838318] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 784.838934] env[68279]: DEBUG nova.virt.hardware [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 784.840550] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e58da4c-b6a1-42cc-9855-f5bcef07e8cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.850556] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4a37e7-8a6e-402e-82a5-a53f39c3c526 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.857773] env[68279]: INFO nova.scheduler.client.report [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Deleted allocations for instance 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f [ 785.604455] env[68279]: DEBUG nova.network.neutron [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.605385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1f2938f4-5fec-4465-806d-2ac168985d49 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.916s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.607662] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:cc:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab5d6d31-7d88-47ee-a53a-80e39c3e2a72', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.614816] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.617910] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Successfully updated port: 50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.629828] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.629828] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9806fb93-9e5e-46df-bbb7-15a7a39d8fb5 tempest-ServerDiagnosticsTest-1751326458 tempest-ServerDiagnosticsTest-1751326458-project-member] Lock "33f3fc4a-319b-4dd9-90b5-05ee5483ac7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.111s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.630996] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d4d05f1-171d-4736-a44e-c20a6c3a90da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.657534] env[68279]: DEBUG oslo_vmware.api [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962936, 'name': PowerOnVM_Task, 'duration_secs': 0.522782} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.657897] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962935, 'name': CreateVM_Task, 'duration_secs': 0.598709} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.659286] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.663147] env[68279]: INFO nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Took 7.70 seconds to spawn the instance on the hypervisor. [ 785.663147] env[68279]: DEBUG nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.663147] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.663147] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.663147] env[68279]: value = "task-2962937" [ 785.663147] env[68279]: _type = "Task" [ 785.663147] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.663147] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b411d414-f23a-41a3-87c4-175d984529b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.664548] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.664702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.665719] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.665816] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050051e9-62dc-4461-9246-9b9b2ad0822c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.680542] env[68279]: DEBUG oslo_vmware.api [None 
req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 785.680542] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b797cd-7c0c-31fc-c41f-5722f5986195" [ 785.680542] env[68279]: _type = "Task" [ 785.680542] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.683746] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962937, 'name': CreateVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.697746] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b797cd-7c0c-31fc-c41f-5722f5986195, 'name': SearchDatastore_Task, 'duration_secs': 0.012707} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.698175] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.698311] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.698546] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.698685] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.699155] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.699223] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fcb65d2-daab-419a-81df-8a57837bfc71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
785.710330] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.710517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.711261] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef21f30-8355-40d8-8d69-abc30bb512ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.721760] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 785.721760] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52775f09-7f52-ba68-c2bc-46d53a81ae0b" [ 785.721760] env[68279]: _type = "Task" [ 785.721760] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.731757] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52775f09-7f52-ba68-c2bc-46d53a81ae0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.859228] env[68279]: DEBUG nova.network.neutron [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updated VIF entry in instance network info cache for port d0861d0a-53ee-41f0-b051-d3c2f213a4a7. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 785.859598] env[68279]: DEBUG nova.network.neutron [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updating instance_info_cache with network_info: [{"id": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "address": "fa:16:3e:35:36:53", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0861d0a-53", "ovs_interfaceid": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.939162] env[68279]: DEBUG nova.compute.manager [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Received event network-vif-plugged-50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.939162] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Acquiring lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.939162] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.939484] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.939484] env[68279]: DEBUG nova.compute.manager [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] No waiting events found dispatching network-vif-plugged-50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 785.939628] env[68279]: WARNING nova.compute.manager [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Received unexpected event network-vif-plugged-50e28093-1111-493b-88da-b7b9cb496900 for instance with vm_state building and task_state spawning. [ 785.939718] env[68279]: DEBUG nova.compute.manager [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Received event network-changed-50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.939822] env[68279]: DEBUG nova.compute.manager [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Refreshing instance network info cache due to event network-changed-50e28093-1111-493b-88da-b7b9cb496900. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.940050] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Acquiring lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.940778] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Acquired lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.940778] env[68279]: DEBUG nova.network.neutron [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Refreshing network info cache for port 50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.130664] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 786.147990] env[68279]: INFO nova.compute.manager [-] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Took 1.56 seconds to deallocate network for instance. [ 786.173063] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.183274] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962937, 'name': CreateVM_Task, 'duration_secs': 0.433264} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.183446] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.184069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.185241] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.185241] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.188186] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c249f681-4ce1-4fba-b3ce-4876fafe2f0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.194988] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 786.194988] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529b3cbc-3c88-3480-a0e5-1ee21c53495f" [ 786.194988] env[68279]: _type = "Task" [ 786.194988] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.198062] env[68279]: INFO nova.compute.manager [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Took 37.22 seconds to build instance. [ 786.209176] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529b3cbc-3c88-3480-a0e5-1ee21c53495f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.235631] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52775f09-7f52-ba68-c2bc-46d53a81ae0b, 'name': SearchDatastore_Task, 'duration_secs': 0.011468} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.236765] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-537d695d-e00c-4599-824b-d09ad6e9ef41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.245192] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 786.245192] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e5241a-01e8-db47-029b-799fe76c7e96" [ 786.245192] env[68279]: _type = "Task" [ 786.245192] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.256071] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e5241a-01e8-db47-029b-799fe76c7e96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.363034] env[68279]: DEBUG oslo_concurrency.lockutils [req-5f39ab65-cb03-431c-b7af-e45727cd6217 req-c655c983-f302-4361-902a-496d2addae36 service nova] Releasing lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.502343] env[68279]: DEBUG nova.network.neutron [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.611024] env[68279]: DEBUG nova.network.neutron [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.653871] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.657823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.702801] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e645411e-1c8b-4d78-ac98-ecd3e9c63c07 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.797s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.710143] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529b3cbc-3c88-3480-a0e5-1ee21c53495f, 'name': SearchDatastore_Task, 'duration_secs': 0.017771} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.710511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.710733] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.710977] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.711170] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.711342] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.711600] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e9beeb4-0ff1-44a9-8ed3-90a64dc4358c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.721948] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.722148] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.723155] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae7ad19-eb63-430a-9404-dc0875083b50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.729782] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 786.729782] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52846a58-c757-c74e-0a6d-4c785cc47549" [ 786.729782] env[68279]: _type = "Task" [ 786.729782] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.741259] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52846a58-c757-c74e-0a6d-4c785cc47549, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.743673] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd2bca9-14fb-4eab-9c5c-e8bd4ce9a67d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.756935] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931038ca-64f3-4379-b47b-d1ed78e39b13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.760767] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e5241a-01e8-db47-029b-799fe76c7e96, 'name': SearchDatastore_Task, 'duration_secs': 0.01503} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.761066] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.761340] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/cfaee7e2-6929-4d8c-8614-e19e0055f2fb.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.761880] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dca331e-d9e3-4218-828f-0f5275890c47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.792220] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223413d8-75ed-4de2-9c50-d32383c8eeb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.794961] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 786.794961] env[68279]: value = "task-2962938" [ 786.794961] env[68279]: _type = "Task" [ 786.794961] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.801992] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76153675-54c0-4ae0-8f4f-d0aba99e9e58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.809404] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962938, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.824498] env[68279]: DEBUG nova.compute.provider_tree [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.113961] env[68279]: DEBUG oslo_concurrency.lockutils [req-02152d13-fb60-4a76-9f6e-d792fd6d2b5d req-393a6fe3-3a4c-4e47-9246-d6e456560874 service nova] Releasing lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.114365] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquired lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.114530] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.211660] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.244460] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52846a58-c757-c74e-0a6d-4c785cc47549, 'name': SearchDatastore_Task, 'duration_secs': 0.011655} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.245250] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11154523-4a87-46bb-9657-d535a9524666 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.251322] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 787.251322] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288cdc4-1359-517a-2d0a-19a06c4d0e0f" [ 787.251322] env[68279]: _type = "Task" [ 787.251322] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.259812] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288cdc4-1359-517a-2d0a-19a06c4d0e0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.306468] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962938, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.329718] env[68279]: DEBUG nova.scheduler.client.report [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.335705] env[68279]: DEBUG nova.compute.manager [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 787.674594] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.735941] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.764205] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288cdc4-1359-517a-2d0a-19a06c4d0e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.015552} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.764765] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.765217] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.765610] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-353019cd-8de8-473e-9788-0974f1b91672 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.776010] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 787.776010] env[68279]: value = "task-2962939" [ 787.776010] env[68279]: _type = "Task" [ 787.776010] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.785664] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.805705] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911386} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.807148] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/cfaee7e2-6929-4d8c-8614-e19e0055f2fb.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.807574] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.807845] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b06e1dd1-1a68-4139-9371-8983ae347c1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.816086] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 787.816086] env[68279]: value = "task-2962940" [ 787.816086] env[68279]: _type = "Task" [ 787.816086] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.825522] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.834247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.023s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.834899] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 787.837951] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.089s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.838200] env[68279]: DEBUG nova.objects.instance [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lazy-loading 'resources' on Instance uuid 4e157792-f910-492c-ab29-dd3f86cb96a8 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.857670] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.873348] env[68279]: DEBUG nova.network.neutron [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Updating instance_info_cache with network_info: [{"id": "50e28093-1111-493b-88da-b7b9cb496900", "address": "fa:16:3e:0c:49:eb", "network": {"id": "448834b8-29a8-4cf3-8ce1-ac215a577719", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1897633906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da59283302649f8a93ac83ac3c1d1eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e28093-11", "ovs_interfaceid": "50e28093-1111-493b-88da-b7b9cb496900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.286369] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962939, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.326406] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071379} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.326682] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.327470] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e83492d-2748-42ef-8c28-1c9d83f18c43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.346847] env[68279]: DEBUG nova.compute.utils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 788.361249] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/cfaee7e2-6929-4d8c-8614-e19e0055f2fb.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.362155] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 788.362844] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 788.364893] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7ab655f-74e0-430c-86cc-859755824c36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.380507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Releasing lock "refresh_cache-ed86ef15-1941-40c5-8178-344a7b401b58" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.380749] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance network_info: |[{"id": "50e28093-1111-493b-88da-b7b9cb496900", "address": "fa:16:3e:0c:49:eb", "network": {"id": "448834b8-29a8-4cf3-8ce1-ac215a577719", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1897633906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da59283302649f8a93ac83ac3c1d1eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e28093-11", "ovs_interfaceid": "50e28093-1111-493b-88da-b7b9cb496900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.383853] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:49:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50e28093-1111-493b-88da-b7b9cb496900', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.391171] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Creating folder: Project (2da59283302649f8a93ac83ac3c1d1eb). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 788.392403] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de46afdc-b63f-46cb-9b30-28f869591039 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.397645] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 788.397645] env[68279]: value = "task-2962941" [ 788.397645] env[68279]: _type = "Task" [ 788.397645] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.409251] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962941, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.410021] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Created folder: Project (2da59283302649f8a93ac83ac3c1d1eb) in parent group-v594445. [ 788.410021] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Creating folder: Instances. Parent ref: group-v594588. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 788.411024] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f03d3adb-6d06-4137-9b51-5f9d9412db35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.436941] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Created folder: Instances in parent group-v594588. [ 788.438084] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.441104] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.441543] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6739789f-12dd-488f-9ce1-b981c923918c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.463232] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.463232] env[68279]: value = "task-2962944" [ 788.463232] env[68279]: _type = "Task" [ 788.463232] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.473218] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962944, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.476056] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.476476] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.484386] env[68279]: DEBUG nova.policy [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f2a76467ed144c4af3094cac933f37e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a00666a2d2b04f789b1066eeae5a8a64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 788.790490] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577141} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.790787] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.791071] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.791291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15befe2a-04a1-446b-b7fb-3604b7b058fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.802466] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 788.802466] env[68279]: value = "task-2962945" [ 788.802466] env[68279]: _type = "Task" [ 788.802466] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.814702] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.862892] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 788.909423] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.975199] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962944, 'name': CreateVM_Task, 'duration_secs': 0.37843} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.979937] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.987524] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.987780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.988258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.989452] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20e26547-95fb-45d7-ab50-27ca45be14dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.991980] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.991980] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.993454] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.997282] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.999419] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.999524] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 788.999524] env[68279]: value = 
"session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520da19e-f852-2cfc-7c28-1efb25bd82f4" [ 788.999524] env[68279]: _type = "Task" [ 788.999524] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.999782] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 788.999962] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 789.000531] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.015806] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520da19e-f852-2cfc-7c28-1efb25bd82f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.027438] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fce26f-f158-4cfe-9825-8a0869254c2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.036229] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f472e5f-c0d3-49fc-aba5-fa52ad75ad36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.040436] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Successfully created port: d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.072448] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ef6c06-a8aa-436f-840b-78316856b6ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.080066] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150c7380-8b3e-4041-97cf-48a4786821d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.094024] env[68279]: DEBUG nova.compute.provider_tree [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.320163] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 
tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248311} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.320567] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.324029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843a6407-70fb-4efa-aa03-cea38ba57551 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.347134] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.347499] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0b07c94-0337-4df2-be30-c7fdd4cdfed3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.375021] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 789.375021] env[68279]: value = "task-2962946" [ 789.375021] env[68279]: _type = "Task" [ 789.375021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.409559] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.508543] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.513950] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520da19e-f852-2cfc-7c28-1efb25bd82f4, 'name': SearchDatastore_Task, 'duration_secs': 0.044829} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.513950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.513950] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.513950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.513950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.513950] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.514947] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99136799-6153-4298-99f2-040831eb128a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.527793] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.528017] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.528844] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c699d6f-1f36-4696-81f4-2a070aa9f892 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.534697] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 789.534697] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fce7d-17d1-e498-c07b-f38f952af33b" [ 789.534697] env[68279]: _type = "Task" [ 789.534697] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.543670] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fce7d-17d1-e498-c07b-f38f952af33b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.596892] env[68279]: DEBUG nova.scheduler.client.report [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.875852] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 789.888584] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.909767] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962941, 'name': ReconfigVM_Task, 'duration_secs': 1.124062} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.911943] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.912188] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.912346] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.912528] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.912674] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.912822] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.913039] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.913209] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.913376] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.913592] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.913953] env[68279]: DEBUG nova.virt.hardware [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.914129] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Reconfigured VM instance instance-00000030 to attach disk [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/cfaee7e2-6929-4d8c-8614-e19e0055f2fb.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.915482] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb57a7e-a584-40ea-bd68-8509a8d98b08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.917650] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33dad109-8fa1-43b0-a345-2102cc321ca8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.927857] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a30bc0-c150-48de-8135-ea480baab182 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.931959] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 789.931959] env[68279]: value = "task-2962947" [ 789.931959] env[68279]: _type = "Task" [ 789.931959] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.949202] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962947, 'name': Rename_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.047239] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fce7d-17d1-e498-c07b-f38f952af33b, 'name': SearchDatastore_Task, 'duration_secs': 0.018814} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.048426] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef28a3ee-fc67-48f8-8168-089c8d161621 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.054989] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 790.054989] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f4cac9-fb76-632a-34ce-0006ca28f7ce" [ 790.054989] env[68279]: _type = "Task" [ 790.054989] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.063602] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f4cac9-fb76-632a-34ce-0006ca28f7ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.101736] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.104559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.553s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.105586] env[68279]: INFO nova.compute.claims [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.126174] env[68279]: INFO nova.scheduler.client.report [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleted allocations for instance 4e157792-f910-492c-ab29-dd3f86cb96a8 [ 790.390461] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962946, 'name': ReconfigVM_Task, 'duration_secs': 0.68959} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.390862] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 50e08259-7915-49bb-b137-5cc6e9d53c16/50e08259-7915-49bb-b137-5cc6e9d53c16.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.391475] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-790ebae8-9d4a-4341-a524-a22702b94a84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.401903] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 790.401903] env[68279]: value = "task-2962948" [ 790.401903] env[68279]: _type = "Task" [ 790.401903] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.414293] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962948, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.446228] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962947, 'name': Rename_Task, 'duration_secs': 0.189523} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.446848] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.447216] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a63c51b-2a42-4816-9f2b-bc57e4a1854b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.456189] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 790.456189] env[68279]: value = "task-2962949" [ 790.456189] env[68279]: _type = "Task" [ 790.456189] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.469129] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.566674] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f4cac9-fb76-632a-34ce-0006ca28f7ce, 'name': SearchDatastore_Task, 'duration_secs': 0.015011} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.566991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.567605] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] ed86ef15-1941-40c5-8178-344a7b401b58/ed86ef15-1941-40c5-8178-344a7b401b58.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.567928] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9da2943d-cb64-4017-9f76-b090096875f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.577495] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 790.577495] env[68279]: value = "task-2962950" [ 790.577495] env[68279]: _type = "Task" [ 790.577495] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.589695] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962950, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.636704] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6ef5065-d598-4bf5-8bf9-9b96bf64c7b2 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "4e157792-f910-492c-ab29-dd3f86cb96a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.593s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.671077] env[68279]: DEBUG nova.compute.manager [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Received event network-vif-plugged-d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.671202] env[68279]: DEBUG oslo_concurrency.lockutils [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] Acquiring lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.671418] env[68279]: DEBUG oslo_concurrency.lockutils [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.671589] env[68279]: DEBUG oslo_concurrency.lockutils [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.671755] env[68279]: DEBUG nova.compute.manager [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] No waiting events found dispatching network-vif-plugged-d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.671921] env[68279]: WARNING nova.compute.manager [req-3b76b27a-cd35-4daa-b4a7-a5ddda67cd76 req-a96a6a7f-2f7b-4e4d-ae83-4c885c85381f service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Received unexpected event network-vif-plugged-d6e29e5a-01c4-4c55-bb4f-dbac8943124c for instance with vm_state building and task_state spawning. [ 790.837090] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Successfully updated port: d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.913640] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962948, 'name': Rename_Task, 'duration_secs': 0.242033} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.914039] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.914321] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-355b9e69-a15a-407e-ab75-b1c660d1549f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.927176] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 790.927176] env[68279]: value = "task-2962951" [ 790.927176] env[68279]: _type = "Task" [ 790.927176] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.936575] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.969584] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962949, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.090601] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962950, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.340029] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.340029] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.340029] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.440547] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962951, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.468568] env[68279]: DEBUG oslo_vmware.api [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962949, 'name': PowerOnVM_Task, 'duration_secs': 0.78346} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.472370] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.472744] env[68279]: INFO nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Took 9.48 seconds to spawn the instance on the hypervisor. 
[ 791.473061] env[68279]: DEBUG nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.474689] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae074f80-2ac6-465e-9480-ad5d77b9e009 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.589600] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618299} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.589885] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] ed86ef15-1941-40c5-8178-344a7b401b58/ed86ef15-1941-40c5-8178-344a7b401b58.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.590121] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.590382] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a983ce61-8365-47e3-b978-e3b427ddd9ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.599544] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 791.599544] env[68279]: value = "task-2962952" [ 791.599544] env[68279]: _type = "Task" [ 791.599544] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.610561] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962952, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.678096] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4a9b7c-4d30-40f5-affe-14cb91d2b057 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.688434] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5693b65-1611-4bd9-b9c6-44bd53d6b9cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.728049] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27608190-114d-45fd-954d-1a4b4552b87d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.737254] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97780fd8-da8a-4304-8300-93e20e3e3372 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.753150] env[68279]: DEBUG nova.compute.provider_tree [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.875263] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.940360] env[68279]: DEBUG oslo_vmware.api [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962951, 'name': PowerOnVM_Task, 'duration_secs': 0.777318} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.940742] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.941014] env[68279]: DEBUG nova.compute.manager [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.942039] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6a25f0-1edb-4dd0-b18e-e9c920808441 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.001457] env[68279]: INFO nova.compute.manager [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Took 38.74 seconds to build instance. [ 792.109796] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962952, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085355} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.110101] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.110883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86363cec-6715-404c-abd7-facf881961b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.116421] env[68279]: DEBUG nova.network.neutron [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Updating instance_info_cache with network_info: [{"id": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "address": "fa:16:3e:6f:b0:ee", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e29e5a-01", "ovs_interfaceid": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.134865] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] ed86ef15-1941-40c5-8178-344a7b401b58/ed86ef15-1941-40c5-8178-344a7b401b58.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.136096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.136385] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Instance network_info: |[{"id": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "address": "fa:16:3e:6f:b0:ee", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e29e5a-01", "ovs_interfaceid": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 792.136634] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eda4b9d0-85d5-43f5-8946-47619e7ff36c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.151430] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:b0:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'd6e29e5a-01c4-4c55-bb4f-dbac8943124c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.158694] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Creating folder: Project (a00666a2d2b04f789b1066eeae5a8a64). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.159854] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7faf818a-3900-41d4-8a46-4340d3a40664 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.168552] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 792.168552] env[68279]: value = "task-2962954" [ 792.168552] env[68279]: _type = "Task" [ 792.168552] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.178829] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Created folder: Project (a00666a2d2b04f789b1066eeae5a8a64) in parent group-v594445. [ 792.179065] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Creating folder: Instances. Parent ref: group-v594591. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.179318] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5daa2a17-4b90-4fc5-aead-e4c42b76f972 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.184307] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962954, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.194659] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Created folder: Instances in parent group-v594591. [ 792.194935] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.195152] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.195366] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90678009-c383-4354-8937-d0636488b22b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.217307] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.217307] env[68279]: value = "task-2962956" [ 792.217307] env[68279]: _type = "Task" [ 792.217307] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.228895] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962956, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.257193] env[68279]: DEBUG nova.scheduler.client.report [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.461994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.503619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b1173086-ccf9-4e31-bf03-28b0b9076ce6 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.207s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.679993] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962954, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.729202] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962956, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.762817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.763366] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 792.766123] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.598s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.766308] env[68279]: DEBUG nova.objects.instance [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 792.855902] env[68279]: DEBUG nova.compute.manager [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Received event network-changed-d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.856025] env[68279]: DEBUG nova.compute.manager [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Refreshing instance network info cache due to event network-changed-d6e29e5a-01c4-4c55-bb4f-dbac8943124c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 792.856270] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] Acquiring lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.856346] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] Acquired lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.857039] env[68279]: DEBUG nova.network.neutron [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Refreshing network info cache for port d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.007239] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.108950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "6b778e98-12c2-42a5-a772-06ea32d090b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.110817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.110817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.110817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.110817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.112930] env[68279]: INFO nova.compute.manager [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Terminating instance [ 793.180119] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962954, 'name': ReconfigVM_Task, 'duration_secs': 0.866924} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.180820] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Reconfigured VM instance instance-00000031 to attach disk [datastore2] ed86ef15-1941-40c5-8178-344a7b401b58/ed86ef15-1941-40c5-8178-344a7b401b58.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.181389] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63fca9f2-238f-4362-ada4-dbf406266c69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.191279] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 793.191279] env[68279]: value = "task-2962957" [ 793.191279] env[68279]: _type = "Task" [ 793.191279] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.201823] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962957, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.228476] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962956, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.274214] env[68279]: DEBUG nova.compute.utils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.279025] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.279025] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.358133] env[68279]: DEBUG nova.policy [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a7de63f07f24606ba4927a66d544923', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fd4e00dfba449c5800a22fc37f2c40b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.424787] env[68279]: INFO nova.compute.manager [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Rescuing [ 793.424903] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.425043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.425215] env[68279]: DEBUG nova.network.neutron [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.541062] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.604421] env[68279]: DEBUG nova.network.neutron [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Updated VIF entry in instance network info cache for port d6e29e5a-01c4-4c55-bb4f-dbac8943124c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.604945] env[68279]: DEBUG nova.network.neutron [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Updating instance_info_cache with network_info: [{"id": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "address": "fa:16:3e:6f:b0:ee", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e29e5a-01", "ovs_interfaceid": "d6e29e5a-01c4-4c55-bb4f-dbac8943124c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.622018] env[68279]: DEBUG nova.compute.manager [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 793.622018] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.622018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c49cb3-730c-4b5c-9114-4fa77345b8a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.631923] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.632365] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb9228b1-ecae-4fb5-bcda-2cd2b1a0e2fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.642534] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 793.642534] env[68279]: value = "task-2962958" [ 793.642534] env[68279]: _type = "Task" [ 793.642534] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.655721] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.704658] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962957, 'name': Rename_Task, 'duration_secs': 0.203403} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.704986] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.705265] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39a704a4-7411-474c-8903-7fd7d1a533ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.713893] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 793.713893] env[68279]: value = "task-2962959" [ 793.713893] env[68279]: _type = "Task" [ 793.713893] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.725529] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.731712] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962956, 'name': CreateVM_Task, 'duration_secs': 1.03892} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.731901] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.732775] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.732999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.733448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 793.733879] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92785793-ede6-4d0e-bb90-36f5e692d2d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.739778] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 793.739778] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bbc05a-18a4-295c-d17a-7fd3eb875588" [ 793.739778] env[68279]: _type = "Task" [ 793.739778] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.754035] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bbc05a-18a4-295c-d17a-7fd3eb875588, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.778029] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b62a806f-4eb6-4892-9ffe-cb1fdabc4c9a tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.779352] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.372s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.779607] env[68279]: DEBUG nova.objects.instance [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lazy-loading 'resources' on Instance uuid 05b94aa5-3efc-4790-9d98-c2658b8e8b4b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 793.783566] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 793.858394] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Successfully created port: 25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.108720] env[68279]: DEBUG oslo_concurrency.lockutils [req-3dc38354-75b5-4e38-83db-9b4e0419e409 req-94d16ea3-eb00-4ed1-9277-b528a8016621 service nova] Releasing lock "refresh_cache-d61b2c4f-942a-4e29-8cac-11bc0750605a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.154674] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962958, 'name': PowerOffVM_Task, 'duration_secs': 0.345959} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.154950] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.155148] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.155413] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2481d698-2e5f-43cc-a57d-de79b5554790 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.158720] env[68279]: DEBUG nova.network.neutron [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updating instance_info_cache with network_info: [{"id": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "address": "fa:16:3e:35:36:53", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0861d0a-53", "ovs_interfaceid": "d0861d0a-53ee-41f0-b051-d3c2f213a4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.229600] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962959, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.232446] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.232665] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.232854] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleting the datastore file [datastore2] 6b778e98-12c2-42a5-a772-06ea32d090b8 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.233178] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7a95398-7b96-4982-83a6-fbf1e5e75264 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.241608] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for the task: (returnval){ [ 794.241608] env[68279]: value = "task-2962961" [ 794.241608] env[68279]: _type = "Task" [ 794.241608] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.256830] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bbc05a-18a4-295c-d17a-7fd3eb875588, 'name': SearchDatastore_Task, 'duration_secs': 0.01337} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.259998] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.260377] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 794.260624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.261256] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.261719] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.262047] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.262346] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3834c2d6-f91b-4fe0-9750-227717c48136 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.284731] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.284928] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.285883] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc46adc5-7957-4f9b-b691-cc66e82ab14f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.305760] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 794.305760] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f6d11-717d-0cb1-aef2-bb978617ca0a" [ 794.305760] env[68279]: _type = "Task" [ 794.305760] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.314468] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f6d11-717d-0cb1-aef2-bb978617ca0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.662132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-cfaee7e2-6929-4d8c-8614-e19e0055f2fb" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.729564] env[68279]: DEBUG oslo_vmware.api [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962959, 'name': PowerOnVM_Task, 'duration_secs': 0.562377} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.729836] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.730126] env[68279]: INFO nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Took 10.11 seconds to spawn the instance on the hypervisor. 
[ 794.730322] env[68279]: DEBUG nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.731132] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ad523b-4a35-479e-91ef-12bd9889b647 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.759788] env[68279]: DEBUG oslo_vmware.api [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Task: {'id': task-2962961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231815} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.759788] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.759788] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.759788] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.759788] env[68279]: INFO nova.compute.manager [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 794.759788] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.759788] env[68279]: DEBUG nova.compute.manager [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 794.759788] env[68279]: DEBUG nova.network.neutron [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.802444] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 794.820450] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529f6d11-717d-0cb1-aef2-bb978617ca0a, 'name': SearchDatastore_Task, 'duration_secs': 0.015057} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.821472] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c57a09b-0703-4822-88b0-08c1a2ed0fd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.829999] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 794.829999] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5253f6ef-86cd-f9eb-8360-4c743da9fd7c" [ 794.829999] env[68279]: _type = "Task" [ 794.829999] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.834776] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 794.835646] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.835646] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 794.835646] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.835646] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] 
Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 794.835646] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 794.835966] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 794.836018] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 794.836161] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 794.836315] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 794.836797] env[68279]: DEBUG nova.virt.hardware [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 794.837692] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf3805f-993e-4865-98c3-e198360d001a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.850338] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5253f6ef-86cd-f9eb-8360-4c743da9fd7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.853361] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a1a046-85fc-4a99-8342-ef9f328ca8d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.904358] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03765ca-9e22-49e4-91b7-5b5153fd04cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.912643] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f21de2c-07c2-4437-9f98-c4d1ecdda089 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.946466] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dca4b57-8637-4bfe-8ed5-c16ed1257cd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.959024] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81898c4-740f-471e-b16c-b0325077df32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.980708] env[68279]: DEBUG nova.compute.provider_tree [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.188821] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.189142] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.267184] env[68279]: INFO nova.compute.manager [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Took 41.64 seconds to build instance. [ 795.341810] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5253f6ef-86cd-f9eb-8360-4c743da9fd7c, 'name': SearchDatastore_Task, 'duration_secs': 0.035443} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.342155] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.345017] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d61b2c4f-942a-4e29-8cac-11bc0750605a/d61b2c4f-942a-4e29-8cac-11bc0750605a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.345017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48a9df7a-d45d-483a-8c54-8d78d9532d50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.351252] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 795.351252] env[68279]: value = "task-2962962" [ 795.351252] env[68279]: _type = "Task" [ 795.351252] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.361161] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962962, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.457029] env[68279]: DEBUG nova.compute.manager [req-f977c70a-5408-4930-9c74-16871509eb5e req-c9adfff9-d20e-4adf-9ac4-6651f3d120dc service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Received event network-vif-deleted-e402e4c3-f9cc-469b-a10e-86b1f89eddad {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.457029] env[68279]: INFO nova.compute.manager [req-f977c70a-5408-4930-9c74-16871509eb5e req-c9adfff9-d20e-4adf-9ac4-6651f3d120dc service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Neutron deleted interface e402e4c3-f9cc-469b-a10e-86b1f89eddad; detaching it from the instance and deleting it from the info cache [ 795.457029] env[68279]: DEBUG nova.network.neutron [req-f977c70a-5408-4930-9c74-16871509eb5e req-c9adfff9-d20e-4adf-9ac4-6651f3d120dc service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.484714] env[68279]: DEBUG nova.scheduler.client.report [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.548734] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.549052] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.549263] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.549447] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.549612] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.551883] env[68279]: INFO nova.compute.manager [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Terminating instance [ 795.561810] env[68279]: DEBUG nova.network.neutron [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.769997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6758063-91bc-4673-8201-524a252afd79 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.440s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.867461] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962962, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.931373] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.931699] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.961558] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a255766-0fb3-4e12-9798-5f7da454440e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.979934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a068d41-4e50-44f6-ad23-7b533f203296 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.995595] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.216s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.998931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.229s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.001941] env[68279]: INFO nova.compute.claims [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.033129] env[68279]: DEBUG nova.compute.manager [req-f977c70a-5408-4930-9c74-16871509eb5e req-c9adfff9-d20e-4adf-9ac4-6651f3d120dc service nova] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Detach interface failed, port_id=e402e4c3-f9cc-469b-a10e-86b1f89eddad, reason: Instance 6b778e98-12c2-42a5-a772-06ea32d090b8 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 796.035182] env[68279]: INFO nova.scheduler.client.report [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Deleted allocations for instance 05b94aa5-3efc-4790-9d98-c2658b8e8b4b [ 796.036925] env[68279]: DEBUG nova.compute.manager [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Received event network-vif-plugged-25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.037141] env[68279]: DEBUG oslo_concurrency.lockutils [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] Acquiring lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.037351] env[68279]: DEBUG oslo_concurrency.lockutils [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.037518] env[68279]: DEBUG oslo_concurrency.lockutils [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.037683] env[68279]: DEBUG nova.compute.manager [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] No waiting events found dispatching network-vif-plugged-25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 796.037843] env[68279]: WARNING nova.compute.manager [req-13806a58-aa78-4521-8a3f-48c9f65e79a1 req-4c7e8d4a-c1d2-4982-919a-3dca73236278 service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Received unexpected event network-vif-plugged-25dc4e9c-46b3-42bc-af52-a7cbc468c28a for instance with vm_state building and task_state spawning. [ 796.055361] env[68279]: DEBUG nova.compute.manager [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.055558] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.058322] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e263378-3405-4eab-ad7b-0ca4f6e63673 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.063565] env[68279]: INFO nova.compute.manager [-] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Took 1.30 seconds to deallocate network for instance. [ 796.069051] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.070134] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ceba9a1-3f2e-4948-9fe0-e198f17cf347 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.074425] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Successfully updated port: 25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.079087] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 796.079087] env[68279]: value = "task-2962963" [ 796.079087] env[68279]: _type = "Task" [ 796.079087] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.088298] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962963, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.207866] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.207866] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5643962-210b-4315-81b6-c3d06200a3f0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.214402] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 796.214402] env[68279]: value = "task-2962964" [ 796.214402] env[68279]: _type = "Task" [ 796.214402] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.226014] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.273322] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.365791] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524388} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.367612] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d61b2c4f-942a-4e29-8cac-11bc0750605a/d61b2c4f-942a-4e29-8cac-11bc0750605a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.367612] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.367612] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67c52316-95dd-4d06-9a15-48c52819cd6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.375307] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 796.375307] env[68279]: value = "task-2962965" [ 796.375307] env[68279]: _type = "Task" [ 796.375307] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.385632] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.548259] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0d613929-44ff-4b88-bee9-b05fa8940d45 tempest-ServerMetadataTestJSON-1646162647 tempest-ServerMetadataTestJSON-1646162647-project-member] Lock "05b94aa5-3efc-4790-9d98-c2658b8e8b4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.806s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.573167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.576515] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.576731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.576805] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.593620] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962963, 'name': PowerOffVM_Task, 'duration_secs': 0.430606} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.593843] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.594069] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.594414] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63cd105b-c6a6-4b23-a84e-6dcc575d3ffa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.671065] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.671229] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.671408] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore1] 7858163d-8e68-4565-b1e0-ecd2e9be350d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.671679] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20ae0602-4d76-418e-8086-883a89de001a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.679337] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 796.679337] env[68279]: value = "task-2962967" [ 796.679337] env[68279]: _type = "Task" [ 796.679337] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.689432] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962967, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.726728] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962964, 'name': PowerOffVM_Task, 'duration_secs': 0.332458} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.727055] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.727844] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22d0ea2-f28b-4c22-ba2b-9424304d0a53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.747265] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917fcef6-93dc-4cb1-9bbe-bbd4debdcfdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.791600] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.792041] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c5a5de9-3e39-41ac-a1ec-b19c5d481566 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.801935] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 796.801935] env[68279]: value = "task-2962968" [ 796.801935] env[68279]: _type = "Task" [ 796.801935] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.806164] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.813342] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 796.813549] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.813793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.813945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.814167] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.814569] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c95edc09-65ee-4cf1-b7ee-1189cf3ef495 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.825896] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.826239] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 796.827344] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff6fed34-57ab-4209-8295-7d323399f8b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.833620] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 796.833620] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b527-a3cb-50ab-f408-6268abc22ca3" [ 796.833620] env[68279]: _type = "Task" [ 796.833620] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.842442] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b527-a3cb-50ab-f408-6268abc22ca3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.886419] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070788} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.886757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.887595] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda318e0-3071-422b-bff9-a30ee50bc5ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.910608] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] d61b2c4f-942a-4e29-8cac-11bc0750605a/d61b2c4f-942a-4e29-8cac-11bc0750605a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.912071] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49af6a84-8ced-4777-b875-2000d077b3e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.937938] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 796.937938] env[68279]: value = "task-2962969" [ 796.937938] env[68279]: _type 
= "Task" [ 796.937938] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.948335] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962969, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.145292] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.193423] env[68279]: DEBUG oslo_vmware.api [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2962967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14413} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.193717] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.193906] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.194118] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.194322] env[68279]: INFO nova.compute.manager [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 797.194582] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.194789] env[68279]: DEBUG nova.compute.manager [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.194921] env[68279]: DEBUG nova.network.neutron [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.354719] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5287b527-a3cb-50ab-f408-6268abc22ca3, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.365993] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baaf4498-e4f0-4a4c-9436-4b146e8aa922 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.370910] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 797.370910] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c023ae-185e-7334-92a6-3681df4c6da6" [ 797.370910] env[68279]: _type = "Task" [ 797.370910] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.391021] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c023ae-185e-7334-92a6-3681df4c6da6, 'name': SearchDatastore_Task, 'duration_secs': 0.01271} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.391021] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.391021] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
{{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 797.391021] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97e03a2c-bf8c-4918-8228-36297fde22c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.405140] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 797.405140] env[68279]: value = "task-2962970" [ 797.405140] env[68279]: _type = "Task" [ 797.405140] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.413065] env[68279]: DEBUG nova.network.neutron [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updating instance_info_cache with network_info: [{"id": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "address": "fa:16:3e:16:9e:10", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25dc4e9c-46", "ovs_interfaceid": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.421259] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962970, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.430500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "ed86ef15-1941-40c5-8178-344a7b401b58" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.430751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.430959] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.431196] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.431378] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.433852] env[68279]: INFO nova.compute.manager [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Terminating instance [ 797.454704] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962969, 'name': ReconfigVM_Task, 'duration_secs': 0.421665} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.455709] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Reconfigured VM instance instance-00000032 to attach disk [datastore2] d61b2c4f-942a-4e29-8cac-11bc0750605a/d61b2c4f-942a-4e29-8cac-11bc0750605a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.456409] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9ac3707-8041-41fa-bd91-a7091a48f1ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.468718] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 797.468718] env[68279]: value = "task-2962971" [ 797.468718] env[68279]: _type = "Task" [ 797.468718] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.484366] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962971, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.682659] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cac238-4eaa-4ee7-a865-92ef03b03163 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.693203] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db24914-3948-41d2-a5d7-9a8e12b0ac42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.738697] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370b2358-248b-4556-9a4c-629777b67754 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.749988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b972b7e-0ef5-47ab-a50a-ae8055eac5f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.772950] env[68279]: DEBUG nova.compute.provider_tree [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.907339] env[68279]: DEBUG nova.compute.manager [req-1045993c-2285-4665-b196-faebadced863 req-0a9e8fad-506b-4b64-8484-9b3fae5c5ec4 service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Received event network-vif-deleted-ff6e9c1c-196d-4f5d-aee7-1248e8476fa6 {{(pid=68279) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 797.907967] env[68279]: INFO nova.compute.manager [req-1045993c-2285-4665-b196-faebadced863 req-0a9e8fad-506b-4b64-8484-9b3fae5c5ec4 service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Neutron deleted interface ff6e9c1c-196d-4f5d-aee7-1248e8476fa6; detaching it from the instance and deleting it from the info cache [ 797.907967] env[68279]: DEBUG nova.network.neutron [req-1045993c-2285-4665-b196-faebadced863 req-0a9e8fad-506b-4b64-8484-9b3fae5c5ec4 service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.915059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.915478] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Instance network_info: |[{"id": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "address": "fa:16:3e:16:9e:10", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25dc4e9c-46", "ovs_interfaceid": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 797.916044] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:9e:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25dc4e9c-46b3-42bc-af52-a7cbc468c28a', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.924548] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.928960] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.929547] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962970, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.929769] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bc74714-da2d-4464-b70f-8b65f0beafe8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.946597] env[68279]: DEBUG nova.compute.manager [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 797.946813] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 797.947893] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e07e2d7-62d5-470a-bc40-16f5bf67d7be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.957439] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 797.959172] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-465291c0-cd08-45b0-9828-8ebf3693d596 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.961346] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.961346] env[68279]: value = "task-2962972" [ 797.961346] env[68279]: _type = "Task" [ 797.961346] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.968081] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 797.968081] env[68279]: value = "task-2962973" [ 797.968081] env[68279]: _type = "Task" [ 797.968081] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.974890] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962972, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.991407] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.996116] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962971, 'name': Rename_Task, 'duration_secs': 0.168378} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.996450] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.996780] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32e3b652-4f6c-4a50-b8d4-3af43e08e6cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.005037] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 798.005037] env[68279]: value = "task-2962974" [ 798.005037] env[68279]: _type = "Task" [ 798.005037] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.014683] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962974, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.068603] env[68279]: DEBUG nova.compute.manager [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Received event network-changed-25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 798.068603] env[68279]: DEBUG nova.compute.manager [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Refreshing instance network info cache due to event network-changed-25dc4e9c-46b3-42bc-af52-a7cbc468c28a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 798.069306] env[68279]: DEBUG oslo_concurrency.lockutils [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] Acquiring lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.069306] env[68279]: DEBUG oslo_concurrency.lockutils [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] Acquired lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.069638] env[68279]: DEBUG nova.network.neutron [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Refreshing network info cache for port 25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 798.242612] env[68279]: DEBUG nova.network.neutron [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.276850] env[68279]: DEBUG nova.scheduler.client.report [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.418829] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61b2524e-c7c2-43cd-a67a-c020d25e0a5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.427476] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562692} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.429548] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
[ 798.429843] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a46a28-820d-42ba-8224-8eeaf3633178 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.435146] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47af8ca7-392e-449a-a89f-c88615a6adff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.475325] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.493416] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab61c202-c314-49ca-bedc-fb3be0ee9819 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.507110] env[68279]: DEBUG nova.compute.manager [req-1045993c-2285-4665-b196-faebadced863 req-0a9e8fad-506b-4b64-8484-9b3fae5c5ec4 service nova] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Detach interface failed, port_id=ff6e9c1c-196d-4f5d-aee7-1248e8476fa6, reason: Instance 7858163d-8e68-4565-b1e0-ecd2e9be350d could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 798.517638] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962972, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.519364] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 798.519364] env[68279]: value = "task-2962975" [ 798.519364] env[68279]: _type = "Task" [ 798.519364] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.532216] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962974, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.532477] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962973, 'name': PowerOffVM_Task, 'duration_secs': 0.222082} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.533114] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 798.533298] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 798.533574] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e46e99de-3a44-43d0-b9d1-027afdf3d1a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.538596] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.608967] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 798.609175] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 798.609397] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Deleting the datastore file [datastore2] ed86ef15-1941-40c5-8178-344a7b401b58 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 798.609685] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31ca9333-7810-4d40-afdb-49db1a2ce4ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.616767] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for the task: (returnval){ [ 798.616767] env[68279]: value = "task-2962977" [ 798.616767] env[68279]: _type = "Task" [ 798.616767] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.625685] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.747140] env[68279]: INFO nova.compute.manager [-] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Took 1.55 seconds to deallocate network for instance. [ 798.782991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.784s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.783541] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.788825] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.812s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.788825] env[68279]: DEBUG nova.objects.instance [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lazy-loading 'resources' on Instance uuid d452e3d2-1590-4352-8406-31d85b2921f4 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.808439] env[68279]: DEBUG nova.network.neutron [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updated VIF entry in instance network info cache for port 25dc4e9c-46b3-42bc-af52-a7cbc468c28a. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.808999] env[68279]: DEBUG nova.network.neutron [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updating instance_info_cache with network_info: [{"id": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "address": "fa:16:3e:16:9e:10", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25dc4e9c-46", "ovs_interfaceid": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.979139] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962972, 'name': CreateVM_Task, 'duration_secs': 0.682869} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.979264] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.979926] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.980128] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.980481] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.980743] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09e0b1b4-5901-4e93-8646-4c6ea02a8773 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.986050] env[68279]: 
DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 798.986050] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4575d-9f76-f655-e37b-51e86a55ce0c" [ 798.986050] env[68279]: _type = "Task" [ 798.986050] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.994682] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4575d-9f76-f655-e37b-51e86a55ce0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.022408] env[68279]: DEBUG oslo_vmware.api [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2962974, 'name': PowerOnVM_Task, 'duration_secs': 0.920806} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.027272] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.027514] env[68279]: INFO nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Took 9.15 seconds to spawn the instance on the hypervisor. [ 799.027730] env[68279]: DEBUG nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.028884] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95f6229-e5ee-4e9d-8287-a40af84249ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.038824] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962975, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.127990] env[68279]: DEBUG oslo_vmware.api [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Task: {'id': task-2962977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26711} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.128816] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 799.129106] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 799.129242] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 799.129522] env[68279]: INFO nova.compute.manager [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Took 1.18 seconds to destroy the instance on the hypervisor. [ 799.129721] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.129946] env[68279]: DEBUG nova.compute.manager [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 799.130066] env[68279]: DEBUG nova.network.neutron [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 799.254401] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.294343] env[68279]: DEBUG nova.compute.utils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 799.295948] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.296158] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.311375] env[68279]: DEBUG oslo_concurrency.lockutils [req-d533f159-8f39-4c6b-ad89-927983ad2113 req-3189b894-0b8d-4ed0-97ee-c7bb1a8ea02f service nova] Releasing lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.412427] env[68279]: DEBUG nova.policy [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655eae57bb1349c0a229c3b57f4d3446', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f947b60992d543c4b0bfee2553bfe357', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.499402] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4575d-9f76-f655-e37b-51e86a55ce0c, 'name': SearchDatastore_Task, 'duration_secs': 0.010604} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.499708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.499971] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.500343] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.500506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.500687] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.500967] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e661e10e-3ed8-42ba-87c1-8238e7bd7dc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.511743] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.512038] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.513773] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa3239a-829d-41a5-839b-10ffa7e387d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.519353] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 799.519353] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e1be53-5507-c6fe-e42e-051330522379" [ 799.519353] env[68279]: _type = "Task" [ 799.519353] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.536172] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e1be53-5507-c6fe-e42e-051330522379, 'name': SearchDatastore_Task, 'duration_secs': 0.009698} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.543013] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962975, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.545287] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8387f34d-9c37-4210-be9a-28d5491aee8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.555105] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 799.555105] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529e02f8-8d21-1917-fb3d-af66cb98bbc0" [ 799.555105] env[68279]: _type = "Task" [ 799.555105] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.555623] env[68279]: INFO nova.compute.manager [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Took 41.28 seconds to build instance. [ 799.567565] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529e02f8-8d21-1917-fb3d-af66cb98bbc0, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.570996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.571372] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/f807e45c-76d8-46a6-a30b-011e7b8df6a4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.572782] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b037dc4-ed7d-49a8-9191-f311c70ff5b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.583724] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 799.583724] env[68279]: value = "task-2962978" [ 799.583724] env[68279]: _type = "Task" [ 799.583724] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.596160] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.799013] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.873437] env[68279]: DEBUG nova.network.neutron [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.941727] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Successfully created port: aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.948858] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea19c23-f6e7-4b77-8fd7-524b1d80cb36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.959141] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a8bbce-f2ca-4dfb-abf7-6938fe8cc3f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.994101] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b89143-50a2-489f-aa30-0a342af78088 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.005132] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bce1740-88e6-4528-943e-ee6d3b15a711 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.022171] env[68279]: DEBUG nova.compute.provider_tree [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.036385] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962975, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.062631] env[68279]: DEBUG oslo_concurrency.lockutils [None req-15d1f2c2-df44-437b-ad86-aa154a08d9e6 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.237s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.098515] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50559} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.098978] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/f807e45c-76d8-46a6-a30b-011e7b8df6a4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.099380] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.099879] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce760521-a88f-4f63-9214-fa619f41a697 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.108572] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 800.108572] env[68279]: value = "task-2962979" [ 800.108572] env[68279]: _type = "Task" [ 800.108572] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.118562] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962979, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.179494] env[68279]: DEBUG nova.compute.manager [req-88caa8a3-d253-4c95-a2e0-5d887edf5cca req-c1ec9d30-9caa-4b51-908b-4a851573ffc1 service nova] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Received event network-vif-deleted-50e28093-1111-493b-88da-b7b9cb496900 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.378735] env[68279]: INFO nova.compute.manager [-] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Took 1.25 seconds to deallocate network for instance. 
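The CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries above follow the usual oslo.vmware pattern that this log keeps tracing: invoke_api() starts a vSphere *_Task, then wait_for_task() polls it (the "progress is 0%" lines) until it either completes successfully or raises. Below is a minimal sketch of that pattern, assuming a VMwareAPISession and a datacenter reference are already set up elsewhere, as the driver does; the helper name and parameter names are illustrative, not Nova's actual code.

def extend_virtual_disk(session, dc_ref, vmdk_path, new_size_kb):
    """Grow a VMDK via VirtualDiskManager.ExtendVirtualDisk_Task.

    `session` is an oslo_vmware.api.VMwareAPISession and `dc_ref` is the
    datacenter managed-object reference the disk lives in; both are assumed
    to have been created elsewhere.
    """
    disk_mgr = session.vim.service_content.virtualDiskManager
    # Kick off the server-side task; this returns a task reference only.
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=vmdk_path,              # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
        datacenter=dc_ref,
        newCapacityKb=new_size_kb,   # the log above shows an extend to 1048576 KB
        eagerZero=False)
    # Block while polling the task, mirroring the repeated "progress is N%"
    # lines in the log; raises if the task ends in an error state.
    return session.wait_for_task(task)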
[ 800.528345] env[68279]: DEBUG nova.scheduler.client.report [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.538218] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962975, 'name': ReconfigVM_Task, 'duration_secs': 1.541716} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.538503] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Reconfigured VM instance instance-00000030 to attach disk [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.540040] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c779e3-1318-4d58-97a1-d9572ecf5aa4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.564222] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.571933] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a02d2420-fea3-4a64-a28b-59c0bbb9627c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.589156] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 800.589156] env[68279]: value = "task-2962980" [ 800.589156] env[68279]: _type = "Task" [ 800.589156] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.598409] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962980, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.624047] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063414} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.624706] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.626025] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0870c798-3a58-4666-a1ad-b31aea0732e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.648297] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/f807e45c-76d8-46a6-a30b-011e7b8df6a4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.648646] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c59be850-df4d-42da-bbf6-957803748f75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.672789] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 800.672789] env[68279]: value = "task-2962981" [ 800.672789] env[68279]: _type = "Task" [ 800.672789] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.683112] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962981, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.811762] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.837104] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.837392] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.837569] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.837757] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.837910] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.838077] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.838290] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.838451] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.838619] env[68279]: DEBUG 
nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.839282] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.839282] env[68279]: DEBUG nova.virt.hardware [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.839889] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ced122e-b70c-42ce-9258-07caba00eff3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.848553] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623b5ffa-8608-495f-aa9c-8910a7966613 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.884895] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.034496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.037417] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.130s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.038357] env[68279]: INFO nova.compute.claims [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.052525] env[68279]: INFO nova.scheduler.client.report [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Deleted allocations for instance d452e3d2-1590-4352-8406-31d85b2921f4 [ 801.090510] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 
tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.101429] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962980, 'name': ReconfigVM_Task, 'duration_secs': 0.417427} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.101712] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.102112] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a554dcca-1a27-45ef-96fd-f32c8964c6fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.111055] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 801.111055] env[68279]: value = "task-2962982" [ 801.111055] env[68279]: _type = "Task" [ 801.111055] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.120936] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.186199] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962981, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.204621] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.204841] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.559228] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6d4b743-18e0-48cd-838f-7be1bf63255f tempest-ServersTestMultiNic-2075120552 tempest-ServersTestMultiNic-2075120552-project-member] Lock "d452e3d2-1590-4352-8406-31d85b2921f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.838s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.579071] env[68279]: DEBUG nova.compute.manager [req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Received event network-vif-plugged-aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 801.579632] env[68279]: DEBUG oslo_concurrency.lockutils [req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] Acquiring lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.579928] env[68279]: DEBUG oslo_concurrency.lockutils [req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] Lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.580267] env[68279]: DEBUG oslo_concurrency.lockutils [req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] Lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.580830] env[68279]: DEBUG nova.compute.manager [req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] No waiting events found dispatching network-vif-plugged-aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 801.581073] env[68279]: WARNING nova.compute.manager 
[req-a3da9ba8-7c1e-4217-9176-52605db78a0c req-64ea91ad-47ac-409a-a9d0-cfb43e8d38e5 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Received unexpected event network-vif-plugged-aa6d2c82-4995-4c78-98c0-3d13f4d30137 for instance with vm_state building and task_state spawning. [ 801.624244] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962982, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.692323] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962981, 'name': ReconfigVM_Task, 'duration_secs': 0.783198} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.692323] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/f807e45c-76d8-46a6-a30b-011e7b8df6a4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.692561] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2c2da87-8aa3-426a-8090-a8de409b4dd5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.702862] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 801.702862] env[68279]: value = "task-2962983" [ 801.702862] env[68279]: _type = "Task" [ 801.702862] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.713734] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962983, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.772913] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Successfully updated port: aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 802.121782] env[68279]: DEBUG oslo_vmware.api [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962982, 'name': PowerOnVM_Task, 'duration_secs': 0.670257} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.122034] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.125039] env[68279]: DEBUG nova.compute.manager [None req-f1aec472-361b-46fe-9c75-925616e0a064 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 802.125850] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e0ff80-da69-45e0-8b79-a6b72354da74 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.214027] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962983, 'name': Rename_Task, 'duration_secs': 0.245975} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.214395] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.214757] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bbe9fb0-660a-4fb2-93c7-b6a57b247380 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.221836] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 802.221836] env[68279]: value = "task-2962984" [ 802.221836] env[68279]: _type = "Task" [ 802.221836] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.230491] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962984, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.278348] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.278348] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.278348] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.628091] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59977191-e3ab-44ef-84ea-fe2a6709ab98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.635777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9777d8-9956-4382-a73d-d11cbed382c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.673898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b1a93c-ec4c-4c06-b844-efae9498461a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.684417] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226fc195-c78e-4f86-903c-472bac61a43a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.702216] env[68279]: DEBUG nova.compute.provider_tree [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.732651] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962984, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.860832] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.120886] env[68279]: DEBUG nova.network.neutron [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Updating instance_info_cache with network_info: [{"id": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "address": "fa:16:3e:f4:22:0b", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6d2c82-49", "ovs_interfaceid": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.205850] env[68279]: DEBUG nova.scheduler.client.report [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.235352] env[68279]: DEBUG oslo_vmware.api [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962984, 'name': PowerOnVM_Task, 'duration_secs': 0.856086} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.235573] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.235781] env[68279]: INFO nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Took 8.43 seconds to spawn the instance on the hypervisor. 
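The network_info blob logged by update_instance_cache_with_nw_info above is a list of VIF dicts. As a small illustration only (not Nova code), a trimmed copy of the entry shown in the log and how the device name, MAC address and fixed IPs can be read out of that structure:

network_info = [{
    "id": "aa6d2c82-4995-4c78-98c0-3d13f4d30137",
    "address": "fa:16:3e:f4:22:0b",
    "devname": "tapaa6d2c82-49",
    "type": "ovs",
    "network": {
        "label": "tempest-ServerDiskConfigTestJSON-1744137091-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    # Collect every fixed IP across all subnets attached to this VIF.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(f'{vif["devname"]} ({vif["address"]}): {", ".join(ips)}')
# -> tapaa6d2c82-49 (fa:16:3e:f4:22:0b): 192.168.128.6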
[ 803.235968] env[68279]: DEBUG nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.237023] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f543b02a-dda3-4ed8-a825-c845be291750 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.627020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.627020] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance network_info: |[{"id": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "address": "fa:16:3e:f4:22:0b", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6d2c82-49", "ovs_interfaceid": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 803.627020] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:22:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa6d2c82-4995-4c78-98c0-3d13f4d30137', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.635050] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.636721] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.638155] env[68279]: DEBUG nova.compute.manager [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Received event network-changed-aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.638483] env[68279]: DEBUG nova.compute.manager [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Refreshing instance network info cache due to event network-changed-aa6d2c82-4995-4c78-98c0-3d13f4d30137. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.638846] env[68279]: DEBUG oslo_concurrency.lockutils [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] Acquiring lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.639137] env[68279]: DEBUG oslo_concurrency.lockutils [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] Acquired lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.639450] env[68279]: DEBUG nova.network.neutron [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Refreshing network info cache for port aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.641318] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca58b754-bf3a-40f5-9383-c3184f47607a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.667530] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.667530] env[68279]: value = "task-2962985" [ 803.667530] env[68279]: _type = "Task" [ 803.667530] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.677545] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962985, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.711156] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.711698] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.714690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.113s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.716476] env[68279]: INFO nova.compute.claims [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.757630] env[68279]: INFO nova.compute.manager [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Took 41.23 seconds to build instance. [ 804.178318] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962985, 'name': CreateVM_Task, 'duration_secs': 0.350562} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.178562] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.179384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.179553] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.179902] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 804.180221] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0686779b-65b9-47a5-8571-b2853c706734 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.186303] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 804.186303] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945a5d-1735-9f56-a070-8276129438fd" [ 804.186303] env[68279]: _type = "Task" [ 804.186303] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.194874] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945a5d-1735-9f56-a070-8276129438fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.221746] env[68279]: DEBUG nova.compute.utils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 804.226117] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 804.226823] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.259147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-95a99caa-859a-45c9-8dc1-4d634f99d782 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.154s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.275076] env[68279]: DEBUG nova.policy [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4815a67fd1c410f82905f7ebe2a4c9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e02764848813428dbe0f88c32ad935ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.324828] env[68279]: INFO nova.compute.manager [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Rescuing [ 804.328016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.328016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.328016] env[68279]: DEBUG nova.network.neutron [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.514252] env[68279]: DEBUG nova.network.neutron [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Updated VIF entry in instance network info cache for port aa6d2c82-4995-4c78-98c0-3d13f4d30137. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.514252] env[68279]: DEBUG nova.network.neutron [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Updating instance_info_cache with network_info: [{"id": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "address": "fa:16:3e:f4:22:0b", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6d2c82-49", "ovs_interfaceid": "aa6d2c82-4995-4c78-98c0-3d13f4d30137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.698374] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945a5d-1735-9f56-a070-8276129438fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013699} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.698795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.698795] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.698942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.699137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.699281] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.699538] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff093388-b3d1-41df-8349-6e3337b61bd6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.706219] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Successfully created port: 21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.710781] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.710983] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.711863] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6890c774-45f6-4be1-9fff-27f77d37bc4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.722026] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 804.722026] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214813d-342a-dfdc-717a-18c3a38abd17" [ 804.722026] env[68279]: _type = "Task" [ 804.722026] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.726564] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.735355] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214813d-342a-dfdc-717a-18c3a38abd17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.762060] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 805.017279] env[68279]: DEBUG oslo_concurrency.lockutils [req-73e2f4b8-1986-4abe-b985-d04ea4a7d983 req-df5417ea-8f2b-4c0a-ab08-0d9da83635e4 service nova] Releasing lock "refresh_cache-4021edd3-346e-44e5-9419-38181cc91c6a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.061889] env[68279]: INFO nova.compute.manager [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Rescuing [ 805.062174] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.062326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.062504] env[68279]: DEBUG nova.network.neutron [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.155035] env[68279]: DEBUG nova.network.neutron [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updating instance_info_cache with network_info: [{"id": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "address": "fa:16:3e:16:9e:10", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25dc4e9c-46", "ovs_interfaceid": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.233092] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 
tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5214813d-342a-dfdc-717a-18c3a38abd17, 'name': SearchDatastore_Task, 'duration_secs': 0.0118} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.236259] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed8d64e1-a648-4b2a-9f68-0960b0d20090 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.245415] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 805.245415] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a0cc05-19e8-4745-b893-1c3ec251ee71" [ 805.245415] env[68279]: _type = "Task" [ 805.245415] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.253645] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a0cc05-19e8-4745-b893-1c3ec251ee71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.285564] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.328767] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f1d7f4-186f-4ec9-8ed9-3a1d8fca8f6c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.336942] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d857e9c6-33b6-46e2-930d-c2a464a4e288 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.369775] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def3bf07-b382-4c2b-801b-33e931b9e9c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.376678] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41746fd-e2bf-492f-b1f3-13513ff05c5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.395316] env[68279]: DEBUG nova.compute.provider_tree [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.659699] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.745247] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.762167] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a0cc05-19e8-4745-b893-1c3ec251ee71, 'name': SearchDatastore_Task, 'duration_secs': 0.009955} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.765663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.766230] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.769819] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7cee0f0-9143-4bda-b29f-70d1034d02ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.777866] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 805.777866] env[68279]: value = "task-2962986" [ 805.777866] env[68279]: _type = "Task" [ 805.777866] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.780112] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.780384] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.780575] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.780799] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.781106] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.781263] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.781533] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.781732] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
805.781937] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.782157] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.782367] env[68279]: DEBUG nova.virt.hardware [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.783267] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1995bf2-20db-4b69-9842-48a73aebb36e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.797395] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.799143] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591aec75-5d5d-4939-a0a7-90dd81c79974 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.868240] env[68279]: DEBUG nova.network.neutron [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [{"id": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "address": "fa:16:3e:ae:b4:7f", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8067a6f-39", "ovs_interfaceid": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.899210] 
env[68279]: DEBUG nova.scheduler.client.report [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 806.294081] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480109} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.295385] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.295577] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.296666] env[68279]: DEBUG nova.compute.manager [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Received event network-vif-plugged-21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.296873] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] Acquiring lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.297145] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.297286] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.297453] env[68279]: DEBUG nova.compute.manager [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] No waiting events found dispatching network-vif-plugged-21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.297617] env[68279]: WARNING nova.compute.manager [req-fbd6117d-2941-449a-9d52-814937c8b88d req-88361d27-ee4a-4fa2-89b0-e51b588e49d3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Received unexpected event network-vif-plugged-21e228fc-8c90-47b4-ae9b-5e46f3ad748b for instance with vm_state building and task_state spawning. [ 806.298093] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c9eb82a-fd90-4f19-a84f-122076c23d46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.305670] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 806.305670] env[68279]: value = "task-2962987" [ 806.305670] env[68279]: _type = "Task" [ 806.305670] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.315758] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.371721] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.409120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.409177] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.412641] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.539s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.412861] env[68279]: DEBUG nova.objects.instance [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 806.477127] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Successfully updated port: 21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.816023] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10597} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.816541] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.817139] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe561d0-6406-4733-9a26-c2858b871ca7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.843380] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.844169] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c68041e3-2cda-4e83-a1cf-d77a332c13ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.865563] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 806.865563] env[68279]: value = "task-2962988" [ 806.865563] env[68279]: _type = "Task" [ 806.865563] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.873933] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.917532] env[68279]: DEBUG nova.compute.utils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.921530] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 806.921530] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.967694] env[68279]: DEBUG nova.policy [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e35ee3c842384e36a91fb335dd81e98a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fadbbd31a4314d12a378689150d3a24d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 806.979447] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.979762] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.979973] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.201878] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.201960] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab52cbcd-f0e6-4e27-9639-f8987bb9991a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.209813] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 807.209813] env[68279]: value = "task-2962989" [ 807.209813] env[68279]: _type = "Task" [ 807.209813] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.219740] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.286647] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Successfully created port: 81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.376733] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962988, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.420713] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.425897] env[68279]: DEBUG oslo_concurrency.lockutils [None req-916230b3-7432-4cc9-a17b-eaf8396da554 tempest-ServersAdmin275Test-1015736347 tempest-ServersAdmin275Test-1015736347-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.427818] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.746s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.428136] env[68279]: DEBUG nova.objects.instance [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lazy-loading 'resources' on Instance uuid a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.554305] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.720293] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962989, 'name': PowerOffVM_Task, 'duration_secs': 0.376122} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.720676] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.721559] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09753065-b307-4e7e-b5da-45d2e09f8d60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.741094] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d64df45-69d4-40b5-91d1-223c8e72ebdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.788317] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.788601] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-739a18cf-ac4a-4bd4-b7e4-0c81ee4a91b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.796159] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 807.796159] env[68279]: value = "task-2962990" [ 807.796159] env[68279]: _type = "Task" [ 807.796159] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.804668] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962990, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.836335] env[68279]: DEBUG nova.network.neutron [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updating instance_info_cache with network_info: [{"id": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "address": "fa:16:3e:f7:ae:f0", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21e228fc-8c", "ovs_interfaceid": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.883903] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962988, 'name': ReconfigVM_Task, 'duration_secs': 0.778568} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.884233] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.885786] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f389c4e9-26b1-4a66-acde-5e169079e184 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.890769] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "8aa8c866-4807-4a06-904e-53c149047d65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.891015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.897189] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 807.897189] env[68279]: value = "task-2962991" [ 807.897189] env[68279]: _type = "Task" [ 807.897189] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.908653] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962991, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.916665] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.916991] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd497a79-7706-4862-83c1-5902a7e03a86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.930422] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 807.930422] env[68279]: value = "task-2962992" [ 807.930422] env[68279]: _type = "Task" [ 807.930422] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.942560] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.307247] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 808.307520] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.307803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.307974] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.308229] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.308510] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-459e40fc-9187-4c82-8ba5-64c40c817eb8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.317953] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.318184] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.322373] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-603fa63a-1717-41f9-84ec-10cb6df2f453 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.329408] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 808.329408] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521368b8-cd10-79c5-7403-722cbba5be8f" [ 808.329408] env[68279]: _type = "Task" [ 808.329408] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.337419] env[68279]: DEBUG nova.compute.manager [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Received event network-changed-21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.337419] env[68279]: DEBUG nova.compute.manager [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Refreshing instance network info cache due to event network-changed-21e228fc-8c90-47b4-ae9b-5e46f3ad748b. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 808.337419] env[68279]: DEBUG oslo_concurrency.lockutils [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] Acquiring lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.343300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.344032] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Instance network_info: |[{"id": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "address": "fa:16:3e:f7:ae:f0", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21e228fc-8c", "ovs_interfaceid": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 808.344032] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521368b8-cd10-79c5-7403-722cbba5be8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.344425] env[68279]: DEBUG oslo_concurrency.lockutils [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] Acquired lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.344604] env[68279]: DEBUG nova.network.neutron [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Refreshing network info cache for port 21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.345749] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:ae:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd19577c9-1b2e-490b-8031-2f278dd3f570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21e228fc-8c90-47b4-ae9b-5e46f3ad748b', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.353717] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Creating folder: Project (e02764848813428dbe0f88c32ad935ef). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.357203] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4276f52e-c5dc-45f8-8026-bfb2a9d5c337 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.379212] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Created folder: Project (e02764848813428dbe0f88c32ad935ef) in parent group-v594445. [ 808.379212] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Creating folder: Instances. Parent ref: group-v594596. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.379212] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-118915cf-ec48-4a21-aa83-7e711b1d419c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.389325] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Created folder: Instances in parent group-v594596. [ 808.392018] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.392018] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.392018] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ac5a827-6601-4fa9-93aa-65351bad983a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.420402] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962991, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.423529] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.423529] env[68279]: value = "task-2962995" [ 808.423529] env[68279]: _type = "Task" [ 808.423529] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.436878] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.438809] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962995, 'name': CreateVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.446562] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2962992, 'name': PowerOffVM_Task, 'duration_secs': 0.198828} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.446837] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.447649] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9112ef89-5c97-465e-bbd9-81ceed45488b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.471676] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed005c61-f1fd-4e89-a872-b518fa536bd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.483071] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.483364] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.483523] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.483708] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.483862] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.484030] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 
tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.484244] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.484434] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.484566] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.484725] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.484923] env[68279]: DEBUG nova.virt.hardware [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.485905] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7febb5cd-d88b-4546-a97a-2c6600ead23e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.498299] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2beaa7-f81e-45f5-9b79-580e0acbccb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.517747] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 808.518055] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7e46819-baa9-4d6a-b330-c155a61d68ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.530679] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 808.530679] env[68279]: value = "task-2962996" [ 
808.530679] env[68279]: _type = "Task" [ 808.530679] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.541966] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 808.542191] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.542406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.615682] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff0f9e0-0e19-46bd-955e-b17388dec56a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.622914] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e939e9-f7f3-4db4-9cc2-e793c6426654 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.658694] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8f4964-105d-430d-9def-e2ca0dc4e935 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.669172] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e76bb8-7121-4496-b3bc-b180ebb1e0ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.686901] env[68279]: DEBUG nova.compute.provider_tree [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.734505] env[68279]: DEBUG nova.network.neutron [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updated VIF entry in instance network info cache for port 21e228fc-8c90-47b4-ae9b-5e46f3ad748b. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.734866] env[68279]: DEBUG nova.network.neutron [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updating instance_info_cache with network_info: [{"id": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "address": "fa:16:3e:f7:ae:f0", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21e228fc-8c", "ovs_interfaceid": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.841033] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521368b8-cd10-79c5-7403-722cbba5be8f, 'name': SearchDatastore_Task, 'duration_secs': 0.01023} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.841704] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0bd849c-3294-4b31-bd71-427b06dd020d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.848264] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 808.848264] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd53c9-42bd-d076-0beb-cf2ad8fb3e6f" [ 808.848264] env[68279]: _type = "Task" [ 808.848264] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.851995] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Successfully updated port: 81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.857993] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd53c9-42bd-d076-0beb-cf2ad8fb3e6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.920741] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962991, 'name': Rename_Task, 'duration_secs': 0.849874} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.921017] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 808.921794] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc43ae7a-6790-47a0-a7c7-00dd9a6f8992 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.928845] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 808.928845] env[68279]: value = "task-2962997" [ 808.928845] env[68279]: _type = "Task" [ 808.928845] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.936724] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962995, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.942230] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962997, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.210236] env[68279]: ERROR nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] [req-4913ea29-83a8-4c20-a06a-c7d77205076c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4913ea29-83a8-4c20-a06a-c7d77205076c"}]} [ 809.226685] env[68279]: DEBUG nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 809.238334] env[68279]: DEBUG oslo_concurrency.lockutils [req-93b7dc85-f770-4153-903e-89f2cdeada06 req-6e20adb0-fe86-4ca5-9d81-91495868a8b8 service nova] Releasing lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.241096] env[68279]: DEBUG nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 809.241332] env[68279]: DEBUG nova.compute.provider_tree [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 809.252805] env[68279]: DEBUG nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Refreshing aggregate associations for resource provider 
40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 809.272599] env[68279]: DEBUG nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 809.354995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.355132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.355179] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.366363] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bd53c9-42bd-d076-0beb-cf2ad8fb3e6f, 'name': SearchDatastore_Task, 'duration_secs': 0.009756} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.369293] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.369560] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
{{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 809.374671] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.374671] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.374842] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ea1b3ae-c26c-403f-ba39-4cdd6615dc43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.379158] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7c3c6d4-5b5f-442d-a73f-94e071746322 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.389303] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 809.389303] env[68279]: value = "task-2962998" [ 809.389303] env[68279]: _type = "Task" [ 809.389303] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.390703] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.390879] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.391983] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153e5936-58ec-409b-a085-0ecb17bcaec7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.397561] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962998, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.401894] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 809.401894] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523d4d8c-bd6a-4e1b-ef2f-02ed584a94dd" [ 809.401894] env[68279]: _type = "Task" [ 809.401894] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.409513] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523d4d8c-bd6a-4e1b-ef2f-02ed584a94dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.436809] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962995, 'name': CreateVM_Task, 'duration_secs': 0.589091} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.439829] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.442766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.442766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.442766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.448409] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92aa2bb8-8f9f-4ff3-84ce-b0d7ad2942f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.450291] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962997, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.458096] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 809.458096] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520901c1-7858-671e-f421-ab66b2375560" [ 809.458096] env[68279]: _type = "Task" [ 809.458096] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.464918] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520901c1-7858-671e-f421-ab66b2375560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.786851] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf82e216-1fc7-40f7-97a2-7eefc59c0ca6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.796023] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8340c0-2def-4d17-b7b4-da69ef89cbd7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.829436] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c95a5ba-73b5-436b-841e-8ab8555b830d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.837660] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773ef0c0-6f24-4dc4-98cb-9d479c7a81b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.853579] env[68279]: DEBUG nova.compute.provider_tree [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 809.899539] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962998, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.900609] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.914488] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523d4d8c-bd6a-4e1b-ef2f-02ed584a94dd, 'name': SearchDatastore_Task, 'duration_secs': 0.019627} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.915304] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82ef6d45-6157-4bfb-8816-62620fd57580 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.922156] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 809.922156] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f22b94-c8ea-96ab-0b89-3c370758ada3" [ 809.922156] env[68279]: _type = "Task" [ 809.922156] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.935845] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f22b94-c8ea-96ab-0b89-3c370758ada3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.949945] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962997, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.970293] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520901c1-7858-671e-f421-ab66b2375560, 'name': SearchDatastore_Task, 'duration_secs': 0.009587} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.971630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.971630] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.971630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.971630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.974082] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.974082] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dd2dead-37f0-4878-bf69-c12a7aaee4cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.000057] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.000057] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.000824] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904337d3-bda7-4e6c-8858-1f7bd2d08434 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.008193] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 810.008193] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b85eff-a090-ca95-46f8-460c840f7676" [ 810.008193] env[68279]: _type = "Task" [ 810.008193] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.019053] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b85eff-a090-ca95-46f8-460c840f7676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.073578] env[68279]: DEBUG nova.network.neutron [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [{"id": "81109740-d2ff-4cc1-babf-d587de399274", "address": "fa:16:3e:9f:e4:40", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81109740-d2", "ovs_interfaceid": "81109740-d2ff-4cc1-babf-d587de399274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.313395] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.314361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 
tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.388478] env[68279]: DEBUG nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 810.390023] env[68279]: DEBUG nova.compute.provider_tree [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 77 to 78 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 810.390023] env[68279]: DEBUG nova.compute.provider_tree [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.402914] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.403377] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
[ 810.404024] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8902e99e-417d-4165-85f7-b57e628448a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.431039] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.432954] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-118b0ea6-93a9-45ab-aeb0-fffb493ff1ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.457765] env[68279]: DEBUG oslo_vmware.api [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2962997, 'name': PowerOnVM_Task, 'duration_secs': 1.179325} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.462399] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.462646] env[68279]: INFO nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Took 9.65 seconds to spawn the instance on the hypervisor. [ 810.462835] env[68279]: DEBUG nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 810.463161] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f22b94-c8ea-96ab-0b89-3c370758ada3, 'name': SearchDatastore_Task, 'duration_secs': 0.059186} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.463420] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 810.463420] env[68279]: value = "task-2962999" [ 810.463420] env[68279]: _type = "Task" [ 810.463420] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.464349] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449e3247-4a2f-425e-baf1-1b02c26a1de8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.467632] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.467802] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. {{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 810.468392] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d6fa83f-4eb7-4f57-9c88-69eef7c076a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.487273] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 810.487273] env[68279]: value = "task-2963000" [ 810.487273] env[68279]: _type = "Task" [ 810.487273] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.487273] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.492638] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.520751] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b85eff-a090-ca95-46f8-460c840f7676, 'name': SearchDatastore_Task, 'duration_secs': 0.020332} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.521538] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c08e9806-86f5-4639-b168-16dedc36f1de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.530116] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 810.530116] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c09ed-f0aa-3c66-db90-254021858753" [ 810.530116] env[68279]: _type = "Task" [ 810.530116] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.538275] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c09ed-f0aa-3c66-db90-254021858753, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.576703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.577134] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance network_info: |[{"id": "81109740-d2ff-4cc1-babf-d587de399274", "address": "fa:16:3e:9f:e4:40", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81109740-d2", "ovs_interfaceid": "81109740-d2ff-4cc1-babf-d587de399274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 810.577917] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 
0daf31be-c547-46ae-aa91-f99e191e1c76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:e4:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81109740-d2ff-4cc1-babf-d587de399274', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.585956] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.586198] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.586429] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6343f197-704b-490c-b56f-3d05beb76090 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.607152] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.607152] env[68279]: value = "task-2963001" [ 810.607152] env[68279]: _type = "Task" [ 810.607152] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.614815] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.671350] env[68279]: DEBUG nova.compute.manager [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received event network-vif-plugged-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.671612] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.671953] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.672142] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.672324] env[68279]: DEBUG nova.compute.manager [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] No waiting events found dispatching network-vif-plugged-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 810.672510] env[68279]: WARNING nova.compute.manager [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received unexpected event network-vif-plugged-81109740-d2ff-4cc1-babf-d587de399274 for instance with vm_state building and task_state spawning. [ 810.672665] env[68279]: DEBUG nova.compute.manager [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received event network-changed-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.672834] env[68279]: DEBUG nova.compute.manager [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing instance network info cache due to event network-changed-81109740-d2ff-4cc1-babf-d587de399274. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 810.673061] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Acquiring lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.673245] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Acquired lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.673409] env[68279]: DEBUG nova.network.neutron [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing network info cache for port 81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.817670] env[68279]: DEBUG nova.compute.utils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 810.894493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.467s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.897153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.835s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.898768] env[68279]: INFO nova.compute.claims [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.919779] env[68279]: INFO nova.scheduler.client.report [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Deleted allocations for instance a332b35f-4f96-4f8f-aa9a-d7fadf9ede53 [ 810.980099] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.995965] env[68279]: INFO nova.compute.manager [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Took 47.27 seconds to build instance. 
[ 811.001989] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963000, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.041941] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c09ed-f0aa-3c66-db90-254021858753, 'name': SearchDatastore_Task, 'duration_secs': 0.016736} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.042758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.043000] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] c62a0d0e-8869-482a-a687-c628b96d6e22/c62a0d0e-8869-482a-a687-c628b96d6e22.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 811.043305] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2a0bccf-228b-4b0e-a5ac-5395f0364b70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.051206] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 811.051206] env[68279]: value = "task-2963002" [ 811.051206] env[68279]: _type = "Task" [ 811.051206] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.059683] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.116627] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963001, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.321645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.398463] env[68279]: DEBUG nova.network.neutron [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updated VIF entry in instance network info cache for port 81109740-d2ff-4cc1-babf-d587de399274. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.398926] env[68279]: DEBUG nova.network.neutron [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [{"id": "81109740-d2ff-4cc1-babf-d587de399274", "address": "fa:16:3e:9f:e4:40", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81109740-d2", "ovs_interfaceid": "81109740-d2ff-4cc1-babf-d587de399274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.427568] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d82a3e61-276d-4243-af47-7fefc49b8b35 tempest-ServersAdmin275Test-2049145939 tempest-ServersAdmin275Test-2049145939-project-member] Lock "a332b35f-4f96-4f8f-aa9a-d7fadf9ede53" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.075s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.479368] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2962999, 'name': ReconfigVM_Task, 'duration_secs': 0.708276} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.479750] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.480704] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0870cef9-51ab-4f5b-a544-f8a47d2d8c4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.510287] env[68279]: DEBUG oslo_concurrency.lockutils [None req-755e89dd-a925-469d-821a-aa8869aba749 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.377s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.510892] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b651a3e5-2756-485a-ab65-db48c847e03c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.533463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.533786] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.534024] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.534294] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.534624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.536370] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585959} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.537383] env[68279]: INFO nova.compute.manager [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Terminating instance [ 811.539971] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. [ 811.540754] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 811.540754] env[68279]: value = "task-2963003" [ 811.540754] env[68279]: _type = "Task" [ 811.540754] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.542312] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725839f8-8b5c-40d5-b2bc-b9e9f3862b16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.555035] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963003, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.581089] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.581652] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5196535a-a917-4570-b14d-6e1a67f88fd7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.600330] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963002, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.601588] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 811.601588] env[68279]: value = "task-2963004" [ 811.601588] env[68279]: _type = "Task" [ 811.601588] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.609313] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963004, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.616206] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963001, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.901518] env[68279]: DEBUG oslo_concurrency.lockutils [req-a29a554c-f2c5-4f52-b676-58d7c2349387 req-b1076215-0713-4981-b97c-ceb8585bc4e5 service nova] Releasing lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.024191] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 812.046419] env[68279]: DEBUG nova.compute.manager [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.046655] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.050954] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1695e53-a5ed-4223-a432-624f74d1ed11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.065349] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963003, 'name': ReconfigVM_Task, 'duration_secs': 0.493757} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.065622] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.069098] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.072098] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df4f6501-21d7-4c13-b4d6-a99f6aaeec3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.073863] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eab4aab1-452d-4be2-bfac-82d225a82cff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.076019] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963002, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.081284] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 812.081284] env[68279]: value = "task-2963006" [ 812.081284] env[68279]: _type = "Task" [ 812.081284] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.084772] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 812.084772] env[68279]: value = "task-2963005" [ 812.084772] env[68279]: _type = "Task" [ 812.084772] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.095678] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.101878] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.112160] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.123218] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963001, 'name': CreateVM_Task, 'duration_secs': 1.38497} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.123420] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 812.124135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.124315] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.124643] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 812.125391] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e31a124-dbe4-46e9-b461-3159ff76b286 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.132128] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 812.132128] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526a3c94-d0d0-f01f-7115-5739410abad8" [ 812.132128] env[68279]: _type = "Task" [ 812.132128] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.141757] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526a3c94-d0d0-f01f-7115-5739410abad8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.391026] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.391970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.391970] env[68279]: INFO nova.compute.manager [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Attaching volume 8b1bb50d-bb4d-4019-9ffa-0979867f8452 to /dev/sdb [ 812.447775] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8acfe3b-e226-4bfd-902e-375a6729e839 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.454595] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572101a3-afab-42c7-8089-0860060bdc94 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.473919] env[68279]: DEBUG nova.virt.block_device [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating existing volume attachment record: 1ddfa050-7130-4091-8f52-d3859a721345 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 812.478085] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd06b10d-2773-4269-8f05-aa83b4a9a8e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.485370] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5a3734-2dbb-4134-aee6-1c4d280ad0c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.521895] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760b5b32-84c8-4835-a105-18d918bcf501 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.532732] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1810e2b-e5b8-4904-9bc8-fb3c056007ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.549537] env[68279]: DEBUG nova.compute.provider_tree [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 
tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.556083] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.564876] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963002, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.598087] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963006, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.601424] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963005, 'name': PowerOffVM_Task, 'duration_secs': 0.279966} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.601620] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 812.601819] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.602120] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12364adc-affe-4987-be8d-5b2586655803 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.612724] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963004, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.643077] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526a3c94-d0d0-f01f-7115-5739410abad8, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.643283] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.643538] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.643790] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.643956] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.644191] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.644564] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63eacbe0-1646-4e68-8fb8-f0925078959c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.653433] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.653651] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 812.654664] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6013861c-6053-4905-877c-2658fcd31991 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.660883] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 812.660883] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fac8e-48a8-b4bf-9e91-fdfd37cd71d8" [ 812.660883] env[68279]: _type = "Task" [ 812.660883] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.668899] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 812.669178] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 812.669993] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleting the datastore file [datastore1] 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.673066] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f47ba07-1cba-4f6d-b5e9-cd6abdb6403c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.675244] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fac8e-48a8-b4bf-9e91-fdfd37cd71d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.679972] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 812.679972] env[68279]: value = "task-2963009" [ 812.679972] env[68279]: _type = "Task" [ 812.679972] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.689432] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.986907] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "7d15a05a-f827-40a7-b182-5d2b553481c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.987774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.054481] env[68279]: DEBUG nova.scheduler.client.report [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.069577] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963002, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.51762} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.069577] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] c62a0d0e-8869-482a-a687-c628b96d6e22/c62a0d0e-8869-482a-a687-c628b96d6e22.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 813.069577] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 813.069577] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f44b797-97e3-4e8b-bed3-af0bbf3da550 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.076514] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 813.076514] env[68279]: value = "task-2963012" [ 813.076514] env[68279]: _type = "Task" [ 813.076514] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.085944] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.094772] env[68279]: DEBUG oslo_vmware.api [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963006, 'name': PowerOnVM_Task, 'duration_secs': 0.728776} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.095051] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.098932] env[68279]: DEBUG nova.compute.manager [None req-5d9d9147-1749-497b-ab5e-71aa2d018219 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.099971] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73888419-c01f-418d-9528-4f57b534e503 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.118078] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963004, 'name': ReconfigVM_Task, 'duration_secs': 1.338048} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.119098] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfigured VM instance instance-0000002f to attach disk [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.119494] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3bd810-ab92-4338-93ba-8447a6905eeb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.149885] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc00615-cc77-4bf9-9e6a-6487c22c1ae3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.172547] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520fac8e-48a8-b4bf-9e91-fdfd37cd71d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010943} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.174540] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 813.174540] env[68279]: value = "task-2963013" [ 813.174540] env[68279]: _type = "Task" [ 813.174540] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.174790] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79bd3fe9-7eca-4bcc-9d1f-feb5328751ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.187882] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 813.187882] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d97661-684c-ea48-76a4-89e6ab758038" [ 813.187882] env[68279]: _type = "Task" [ 813.187882] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.197301] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963013, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.197566] env[68279]: DEBUG oslo_vmware.api [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203428} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.198219] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.198508] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.198602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.198811] env[68279]: INFO nova.compute.manager [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 813.202354] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.202354] env[68279]: DEBUG nova.compute.manager [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 813.202354] env[68279]: DEBUG nova.network.neutron [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.206310] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d97661-684c-ea48-76a4-89e6ab758038, 'name': SearchDatastore_Task, 'duration_secs': 0.012964} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.206310] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.206310] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0daf31be-c547-46ae-aa91-f99e191e1c76/0daf31be-c547-46ae-aa91-f99e191e1c76.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 813.206310] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-934e1872-01fd-482a-bcd1-428a3a115746 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.216668] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 813.216668] env[68279]: value = "task-2963014" [ 813.216668] env[68279]: _type = "Task" [ 813.216668] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.223029] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963014, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.566023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.566023] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.569768] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.303s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.570140] env[68279]: DEBUG nova.objects.instance [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 813.588977] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067675} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.589548] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 813.591726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2670cf6-314d-4b25-8def-5575c794b517 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.632933] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] c62a0d0e-8869-482a-a687-c628b96d6e22/c62a0d0e-8869-482a-a687-c628b96d6e22.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 813.635817] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd0cf6f8-6bfa-42f7-9bf3-a7eab5d4ead4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.664774] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 813.664774] env[68279]: value = "task-2963015" [ 813.664774] env[68279]: _type = "Task" [ 813.664774] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.682156] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963015, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.697483] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963013, 'name': ReconfigVM_Task, 'duration_secs': 0.186757} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.697836] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.698668] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90526318-72ff-432d-9564-d56df483dae4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.707554] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 813.707554] env[68279]: value = "task-2963016" [ 813.707554] env[68279]: _type = "Task" [ 813.707554] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.720522] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963016, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.725734] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963014, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.799154] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.799442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.799664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.800030] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.800242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.802420] env[68279]: INFO nova.compute.manager [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Terminating instance [ 813.989441] env[68279]: INFO nova.compute.manager [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Rebuilding instance [ 814.029118] env[68279]: DEBUG nova.compute.manager [req-28e40750-815a-4320-b6ea-a46dd3b09756 req-311ebd18-5734-44b7-8980-4c0cad2305a3 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Received event network-vif-deleted-bd0e2597-abb7-4689-856c-4ad289b6c70d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.029319] env[68279]: INFO nova.compute.manager [req-28e40750-815a-4320-b6ea-a46dd3b09756 req-311ebd18-5734-44b7-8980-4c0cad2305a3 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Neutron 
deleted interface bd0e2597-abb7-4689-856c-4ad289b6c70d; detaching it from the instance and deleting it from the info cache [ 814.029552] env[68279]: DEBUG nova.network.neutron [req-28e40750-815a-4320-b6ea-a46dd3b09756 req-311ebd18-5734-44b7-8980-4c0cad2305a3 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.047845] env[68279]: DEBUG nova.compute.manager [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.048764] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02aa9f2a-d7a8-4cd0-8d7d-5c205c98a268 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.080268] env[68279]: DEBUG nova.compute.utils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.080873] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.081064] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.125793] env[68279]: DEBUG nova.policy [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8831a56664404da3a03d6d8241e693ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91ef3e24b66c44a29463a982c192a06e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.177937] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963015, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.219183] env[68279]: DEBUG oslo_vmware.api [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963016, 'name': PowerOnVM_Task, 'duration_secs': 0.454798} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.222990] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.226862] env[68279]: DEBUG nova.compute.manager [None req-68351cd7-1f52-485b-bdb8-f1a89f7fa98a tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.227498] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cba712c-032a-4ee7-81d0-e3f08cd2d3ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.234551] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518399} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.234607] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0daf31be-c547-46ae-aa91-f99e191e1c76/0daf31be-c547-46ae-aa91-f99e191e1c76.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 814.235843] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 814.235843] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8941658-befb-427b-9f73-54c7ab39b5d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.246117] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 814.246117] env[68279]: value = "task-2963017" [ 814.246117] env[68279]: _type = "Task" [ 814.246117] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.254268] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963017, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.306861] env[68279]: DEBUG nova.compute.manager [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 814.307105] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.307381] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e363cdb7-f13f-4b05-bdc7-21cf7d7c8844 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.317770] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 814.317770] env[68279]: value = "task-2963018" [ 814.317770] env[68279]: _type = "Task" [ 814.317770] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.326169] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.424698] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Successfully created port: 623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.512217] env[68279]: DEBUG nova.network.neutron [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.532089] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2845f604-588d-432e-9dc9-5c8f2a8c8f36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.548786] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7681b6a2-1786-4cbc-8882-57ccfda35d0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.598603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0c94ceda-b4d4-4f28-80b6-c40e51700340 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.030s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.600989] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.603705] env[68279]: DEBUG nova.compute.manager [req-28e40750-815a-4320-b6ea-a46dd3b09756 req-311ebd18-5734-44b7-8980-4c0cad2305a3 service nova] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Detach interface failed, port_id=bd0e2597-abb7-4689-856c-4ad289b6c70d, reason: Instance 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 814.604473] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.990s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.604656] env[68279]: DEBUG nova.objects.instance [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 814.680124] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963015, 'name': ReconfigVM_Task, 'duration_secs': 0.716929} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.680610] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Reconfigured VM instance instance-00000035 to attach disk [datastore2] c62a0d0e-8869-482a-a687-c628b96d6e22/c62a0d0e-8869-482a-a687-c628b96d6e22.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 814.681340] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ead6051-c69a-4f87-a68c-842a649525f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.691127] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 814.691127] env[68279]: value = "task-2963019" [ 814.691127] env[68279]: _type = "Task" [ 814.691127] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.699575] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963019, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.755108] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963017, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16531} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.755371] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 814.756926] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e59741e-d68a-4da7-9f53-466a9be5f7f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.774287] env[68279]: INFO nova.compute.manager [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Unrescuing [ 814.774552] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.774704] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquired lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.774869] env[68279]: DEBUG nova.network.neutron [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.787019] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 0daf31be-c547-46ae-aa91-f99e191e1c76/0daf31be-c547-46ae-aa91-f99e191e1c76.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.788684] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adb26910-a904-4c55-857b-ea5e899d368e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.813405] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 814.813405] env[68279]: value = "task-2963021" [ 814.813405] env[68279]: _type = "Task" [ 814.813405] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.824130] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963021, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.829837] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963018, 'name': PowerOffVM_Task, 'duration_secs': 0.342641} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.830392] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.830673] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 814.830983] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594489', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'name': 'volume-b6edfa04-38f3-4e20-9c83-faca792e474a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5565f0d-ed60-4ac8-bba1-ab46b337dd90', 'attached_at': '', 'detached_at': '', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'serial': 'b6edfa04-38f3-4e20-9c83-faca792e474a'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 814.831965] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7bbb78-f93f-421c-a6a4-5b7b3b78c936 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.856966] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982b82ca-68a5-4a5c-9236-3838706be484 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.863676] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7966a3ae-9655-4104-9e6e-8d7e35af94b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.883410] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b16a445-d10f-45f5-8a7f-07f04d62972a {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.899192] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] The volume has not been displaced from its original location: [datastore1] volume-b6edfa04-38f3-4e20-9c83-faca792e474a/volume-b6edfa04-38f3-4e20-9c83-faca792e474a.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 814.904973] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Reconfiguring VM instance instance-00000024 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 814.905325] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a06d3216-5bd4-4dab-adc4-7ea8cc21a0df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.924342] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 814.924342] env[68279]: value = "task-2963022" [ 814.924342] env[68279]: _type = "Task" [ 814.924342] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.933325] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.016562] env[68279]: INFO nova.compute.manager [-] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Took 1.81 seconds to deallocate network for instance. [ 815.065855] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 815.065855] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d6349d6-d17b-4980-b105-bbafc41d07aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.073827] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 815.073827] env[68279]: value = "task-2963023" [ 815.073827] env[68279]: _type = "Task" [ 815.073827] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.083629] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.203515] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963019, 'name': Rename_Task, 'duration_secs': 0.257056} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.203939] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.204333] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7961ed5d-bbaa-4a4e-9b37-1cb500a61627 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.212562] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 815.212562] env[68279]: value = "task-2963024" [ 815.212562] env[68279]: _type = "Task" [ 815.212562] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.227346] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.324348] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963021, 'name': ReconfigVM_Task, 'duration_secs': 0.342491} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.324705] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 0daf31be-c547-46ae-aa91-f99e191e1c76/0daf31be-c547-46ae-aa91-f99e191e1c76.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.325458] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23bfdff1-d9ea-4bf6-a4a2-55a93018cd29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.332036] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 815.332036] env[68279]: value = "task-2963025" [ 815.332036] env[68279]: _type = "Task" [ 815.332036] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.342692] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963025, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.434773] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963022, 'name': ReconfigVM_Task, 'duration_secs': 0.183778} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.435092] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Reconfigured VM instance instance-00000024 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 815.440480] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e41c200d-e2ef-40bf-8258-197142f6cf10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.457719] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 815.457719] env[68279]: value = "task-2963026" [ 815.457719] env[68279]: _type = "Task" [ 815.457719] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.467195] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.525874] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.585473] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963023, 'name': PowerOffVM_Task, 'duration_secs': 0.168753} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.585762] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.586058] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 815.586904] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2024da-2195-46f5-a686-e06fa16a7c9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.594845] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.597449] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cd3b319-22a8-4c54-9886-3b0853ab8f2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.617924] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 815.621224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0153ab64-d672-43b7-8b96-cce1bac6b9e4 tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.622223] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.731s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.622903] env[68279]: DEBUG nova.objects.instance [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'resources' on Instance uuid 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.659658] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 815.659992] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.660373] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 815.660584] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.661650] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 
tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 815.662118] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 815.662487] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 815.662761] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 815.663076] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 815.663379] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 815.663715] env[68279]: DEBUG nova.virt.hardware [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 815.665353] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772d09db-8c97-474a-9f19-364b6b018f52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.680139] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f943922-2139-4c92-8d1a-9fc15e243d49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.685856] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.686096] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 
4021edd3-346e-44e5-9419-38181cc91c6a] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.686741] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.687621] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5d9b1b7-c75d-4be8-b106-e0eb766a7e02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.708686] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 815.708686] env[68279]: value = "task-2963028" [ 815.708686] env[68279]: _type = "Task" [ 815.708686] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.724608] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.727899] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963024, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.740531] env[68279]: INFO nova.compute.manager [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Unrescuing [ 815.740710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.740900] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.741110] env[68279]: DEBUG nova.network.neutron [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.747219] env[68279]: DEBUG nova.network.neutron [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updating instance_info_cache with network_info: [{"id": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "address": "fa:16:3e:16:9e:10", "network": {"id": "2bcc63b9-edd1-44cc-bb26-c0cf5fdb1164", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-951861981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "9fd4e00dfba449c5800a22fc37f2c40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25dc4e9c-46", "ovs_interfaceid": "25dc4e9c-46b3-42bc-af52-a7cbc468c28a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.842651] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963025, 'name': Rename_Task, 'duration_secs': 0.141403} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.842651] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.842651] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44faf915-5ab7-4ab0-9678-3e80e0ff5945 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.852026] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 815.852026] env[68279]: value = "task-2963029" [ 815.852026] env[68279]: _type = "Task" [ 815.852026] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.858362] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.968394] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963026, 'name': ReconfigVM_Task, 'duration_secs': 0.220122} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.969074] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594489', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'name': 'volume-b6edfa04-38f3-4e20-9c83-faca792e474a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5565f0d-ed60-4ac8-bba1-ab46b337dd90', 'attached_at': '', 'detached_at': '', 'volume_id': 'b6edfa04-38f3-4e20-9c83-faca792e474a', 'serial': 'b6edfa04-38f3-4e20-9c83-faca792e474a'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 815.969247] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 815.970572] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d818c1-1929-4292-a60d-e2e328cd5cab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.977479] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.978060] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a22b74ff-7b22-4c1a-84ba-5b38c67e1cc9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.044154] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.044456] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.044456] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Deleting the datastore file [datastore1] e5565f0d-ed60-4ac8-bba1-ab46b337dd90 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.044722] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc58d011-25bf-4899-95da-637b92b319b6 {{(pid=68279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.054275] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for the task: (returnval){ [ 816.054275] env[68279]: value = "task-2963031" [ 816.054275] env[68279]: _type = "Task" [ 816.054275] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.063221] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.098686] env[68279]: DEBUG nova.compute.manager [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Received event network-vif-plugged-623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.098844] env[68279]: DEBUG oslo_concurrency.lockutils [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] Acquiring lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.099364] env[68279]: DEBUG oslo_concurrency.lockutils [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.099621] env[68279]: DEBUG oslo_concurrency.lockutils [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.099885] env[68279]: DEBUG nova.compute.manager [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] No waiting events found dispatching network-vif-plugged-623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.100132] env[68279]: WARNING nova.compute.manager [req-a883bb1c-2866-48b8-9705-1211cc61c3c4 req-cb475c97-54b7-49fa-b15e-595ac1c2dd6a service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Received unexpected event network-vif-plugged-623a6aa7-e812-4756-b223-4c3c8c03b5c9 for instance with vm_state building and task_state spawning. 
[ 816.131620] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Successfully updated port: 623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.226526] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.412603} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.226840] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.227051] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 816.227233] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 816.235566] env[68279]: DEBUG oslo_vmware.api [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963024, 'name': PowerOnVM_Task, 'duration_secs': 0.592333} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.238304] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 816.238512] env[68279]: INFO nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Took 10.49 seconds to spawn the instance on the hypervisor. 
[ 816.238690] env[68279]: DEBUG nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.240042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fb078c-ffdc-4e6a-968c-946b64403d45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.253780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Releasing lock "refresh_cache-f807e45c-76d8-46a6-a30b-011e7b8df6a4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.254370] env[68279]: DEBUG nova.objects.instance [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lazy-loading 'flavor' on Instance uuid f807e45c-76d8-46a6-a30b-011e7b8df6a4 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.363528] env[68279]: DEBUG oslo_vmware.api [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963029, 'name': PowerOnVM_Task, 'duration_secs': 0.46505} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.363784] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 816.364268] env[68279]: INFO nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 7.93 seconds to spawn the instance on the hypervisor. [ 816.364480] env[68279]: DEBUG nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.365712] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7a2649-70c6-4810-b538-0b75fc13af59 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.563628] env[68279]: DEBUG oslo_vmware.api [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Task: {'id': task-2963031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17356} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.566850] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.567072] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 816.567363] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 816.567428] env[68279]: INFO nova.compute.manager [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Took 2.26 seconds to destroy the instance on the hypervisor. [ 816.567670] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 816.568335] env[68279]: DEBUG nova.compute.manager [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 816.568442] env[68279]: DEBUG nova.network.neutron [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 816.598209] env[68279]: DEBUG nova.network.neutron [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [{"id": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "address": "fa:16:3e:ae:b4:7f", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8067a6f-39", "ovs_interfaceid": "d8067a6f-39fd-42be-8f8e-23d5dea92c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.637740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.637937] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.638197] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.762870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b619388f-e749-4669-a2ba-55f9e9cf66dc {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.766340] env[68279]: INFO nova.compute.manager [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Took 48.88 seconds to build instance. [ 816.791353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 816.795078] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d92d8e0-0bfd-4379-a180-186a2e339e05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.805073] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 816.805073] env[68279]: value = "task-2963032" [ 816.805073] env[68279]: _type = "Task" [ 816.805073] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.819674] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.847381] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c78ff9-bb8f-42b6-a1bc-d3c8f976f049 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.856225] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab720c8a-be87-491a-81fb-10dc534d0ebf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.890853] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb451e9-4d56-43f1-a94e-6a0130fdcd66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.895908] env[68279]: INFO nova.compute.manager [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 45.33 seconds to build instance. 
[ 816.900958] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6da8be2-ed2f-4036-876d-f956d2c1ab62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.914459] env[68279]: DEBUG nova.compute.provider_tree [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.047170] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Volume attach. Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 817.047449] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594602', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'name': 'volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6ca13774-f4db-4c9c-9da7-b773ce6cc6e7', 'attached_at': '', 'detached_at': '', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'serial': '8b1bb50d-bb4d-4019-9ffa-0979867f8452'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 817.048288] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93364134-2769-499c-a082-7fd41c6290b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.069028] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97ccb6f-69fd-465d-93b1-65765e34412f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.097333] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452/volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.101024] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf22e5fb-7157-48c9-b6ff-c1e659e1a1a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.110741] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock 
"refresh_cache-fe92e176-222c-4c46-a254-1c12e21c68d0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.111351] env[68279]: DEBUG nova.objects.instance [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'flavor' on Instance uuid fe92e176-222c-4c46-a254-1c12e21c68d0 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.122135] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Waiting for the task: (returnval){ [ 817.122135] env[68279]: value = "task-2963033" [ 817.122135] env[68279]: _type = "Task" [ 817.122135] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.129238] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963033, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.193128] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.271074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c3f5f93e-7a7c-45e6-b385-44ae1556ab3b tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.155s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.276218] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.276459] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 
0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.276613] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.276911] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.276965] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.277077] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.277284] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.277449] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 817.277620] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.278780] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.278780] env[68279]: DEBUG nova.virt.hardware [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.278891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88299f82-8fb0-4a81-8a9d-54df48e756be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.289371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-646c44c0-ab8a-4bbf-b66d-f294281b07f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.305917] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:22:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa6d2c82-4995-4c78-98c0-3d13f4d30137', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.313916] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.314771] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 817.317731] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31624c23-5fb5-46a4-ac1c-2a4a75a2f6f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.339852] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963032, 'name': PowerOffVM_Task, 'duration_secs': 0.481669} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.341097] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 817.346215] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 817.346477] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.346477] env[68279]: value = "task-2963034" [ 817.346477] env[68279]: _type = "Task" [ 817.346477] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.346676] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45af8577-59d1-49df-ae95-81ce2ff3dec4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.367750] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 817.367750] env[68279]: value = "task-2963035" [ 817.367750] env[68279]: _type = "Task" [ 817.367750] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.371216] env[68279]: DEBUG nova.network.neutron [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Updating instance_info_cache with network_info: [{"id": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "address": "fa:16:3e:48:ba:60", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap623a6aa7-e8", "ovs_interfaceid": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.372421] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963034, 'name': CreateVM_Task} progress is 15%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.380310] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963035, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.398611] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c042b961-1495-44ef-9dce-5ee7f99b4768 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.679s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.418025] env[68279]: DEBUG nova.scheduler.client.report [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.628553] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ca2eaa-8021-469f-8522-b2a932f54bd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.637795] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963033, 'name': ReconfigVM_Task, 'duration_secs': 0.508479} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.662117] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfigured VM instance instance-0000000a to attach disk [datastore2] volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452/volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.668269] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.668656] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b51a9169-7cc9-4bb0-9297-5080033c4ad3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.679836] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bce82796-83cf-4cde-860e-ee370502f00f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.688018] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Waiting for the task: (returnval){ [ 817.688018] env[68279]: value = "task-2963037" [ 817.688018] env[68279]: _type = "Task" [ 817.688018] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.688486] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 817.688486] env[68279]: value = "task-2963036" [ 817.688486] env[68279]: _type = "Task" [ 817.688486] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.703025] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963037, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.705393] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963036, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.774893] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 817.879469] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.879868] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Instance network_info: |[{"id": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "address": "fa:16:3e:48:ba:60", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap623a6aa7-e8", "ovs_interfaceid": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 817.883018] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963034, 'name': CreateVM_Task, 'duration_secs': 0.38331} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.884450] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:ba:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '623a6aa7-e812-4756-b223-4c3c8c03b5c9', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.893331] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.893483] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 817.894370] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 817.895060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.895240] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.895619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 817.898925] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b4b4ef1-6763-4f5e-af41-b600dd0dad53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.912739] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76de76e3-dbfa-41f5-97cc-40a92d6d00a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.917886] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 817.920562] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963035, 'name': ReconfigVM_Task, 'duration_secs': 0.302838} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.921674] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 817.921867] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.922654] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.300s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.925393] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef785b93-0e61-48f9-8066-7e4003c9b48b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.928596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.359s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.928826] env[68279]: DEBUG nova.objects.instance [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lazy-loading 'resources' on Instance uuid 01a624d3-782d-44cf-8a4e-05a85ac91c64 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.930650] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 817.930650] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521cec29-a2f1-e141-b013-c7c3c6a17e54" [ 817.930650] env[68279]: _type = "Task" [ 817.930650] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.930892] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.930892] env[68279]: value = "task-2963038" [ 817.930892] env[68279]: _type = "Task" [ 817.930892] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.939173] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 817.939173] env[68279]: value = "task-2963039" [ 817.939173] env[68279]: _type = "Task" [ 817.939173] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.950601] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521cec29-a2f1-e141-b013-c7c3c6a17e54, 'name': SearchDatastore_Task, 'duration_secs': 0.011569} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.950801] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963038, 'name': CreateVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.952212] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.952517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 817.953374] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.953374] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.953374] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 817.954626] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4d9bdb8-ba5f-4d5a-b0a8-dc3ebbc89b8c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.959992] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963039, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.961673] env[68279]: INFO nova.scheduler.client.report [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae [ 817.969414] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 817.969414] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 817.970142] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b248dc3-dc6e-4da4-bc5f-0f0020e975dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.976506] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 817.976506] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7e2a8-a240-062e-d40b-4ebfd884e266" [ 817.976506] env[68279]: _type = "Task" [ 817.976506] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.985871] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7e2a8-a240-062e-d40b-4ebfd884e266, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.201178] env[68279]: DEBUG oslo_vmware.api [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963037, 'name': ReconfigVM_Task, 'duration_secs': 0.155287} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.204269] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594602', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'name': 'volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6ca13774-f4db-4c9c-9da7-b773ce6cc6e7', 'attached_at': '', 'detached_at': '', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'serial': '8b1bb50d-bb4d-4019-9ffa-0979867f8452'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 818.205724] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963036, 'name': PowerOffVM_Task, 'duration_secs': 0.219763} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.205991] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.213277] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 818.213977] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e54970f7-e0c9-49a8-8580-4c92d95b6dac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.233115] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 818.233115] env[68279]: value = "task-2963040" [ 818.233115] env[68279]: _type = "Task" [ 818.233115] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.241779] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963040, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.293526] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.374102] env[68279]: DEBUG nova.network.neutron [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.447018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.447375] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963038, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.457848] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963039, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.479271] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a46bfb86-ae40-4301-a737-f2c8375a7f5e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "1a604a32-78c1-49cf-bafd-e1dc94c8b3ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.026s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.490302] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7e2a8-a240-062e-d40b-4ebfd884e266, 'name': SearchDatastore_Task, 'duration_secs': 0.024247} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.490975] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7372e87c-900e-4c25-885b-d72f6188fe36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.496993] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 818.496993] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529df155-b504-0272-9b37-d42790979378" [ 818.496993] env[68279]: _type = "Task" [ 818.496993] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.509056] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529df155-b504-0272-9b37-d42790979378, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.740671] env[68279]: DEBUG nova.compute.manager [req-7f8e5304-755b-4264-870d-27342b3a9c44 req-3d141f27-9210-40c9-a9af-ec5e779d97de service nova] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Received event network-vif-deleted-de55f764-d554-4fcc-bc9d-3987f9c39bc3 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.752616] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963040, 'name': ReconfigVM_Task, 'duration_secs': 0.314149} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.752930] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 818.753622] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.753622] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8252160a-f1e5-4747-abe8-121228d753f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.767149] env[68279]: DEBUG nova.compute.manager [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Received event network-changed-623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.767349] env[68279]: DEBUG nova.compute.manager [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Refreshing instance network info cache due to event network-changed-623a6aa7-e812-4756-b223-4c3c8c03b5c9. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 818.767563] env[68279]: DEBUG oslo_concurrency.lockutils [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] Acquiring lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.767700] env[68279]: DEBUG oslo_concurrency.lockutils [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] Acquired lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.767858] env[68279]: DEBUG nova.network.neutron [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Refreshing network info cache for port 623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.772018] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 818.772018] env[68279]: value = "task-2963041" [ 818.772018] env[68279]: _type = "Task" [ 818.772018] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.785834] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963041, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.874914] env[68279]: INFO nova.compute.manager [-] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Took 2.31 seconds to deallocate network for instance. [ 818.942697] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963038, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.957257] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963039, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.007095] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529df155-b504-0272-9b37-d42790979378, 'name': SearchDatastore_Task, 'duration_secs': 0.011703} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.010362] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.010743] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.011416] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8af00dc-b7b3-4eb2-81b9-c7857804155d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.018452] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 819.018452] env[68279]: value = "task-2963042" [ 819.018452] env[68279]: _type = "Task" [ 819.018452] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.033534] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963042, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.138481] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e82f07-0e72-474e-b86b-8893fe714d22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.150038] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d23ca1d-a6bf-41b2-a40e-dbb1a6882987 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.190151] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b75ada-195a-477b-a50c-604e98b55761 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.198333] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539de08d-91b7-49d7-bb7c-5413d9d5c6e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.212437] env[68279]: DEBUG nova.compute.provider_tree [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.265790] env[68279]: DEBUG nova.objects.instance [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lazy-loading 'flavor' on Instance uuid 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.284809] env[68279]: DEBUG oslo_vmware.api [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963041, 'name': PowerOnVM_Task, 'duration_secs': 0.41058} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.285019] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.285265] env[68279]: DEBUG nova.compute.manager [None req-fa144424-3226-4f82-a086-1cc7f2d447c9 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.286072] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95f7b84-69a1-439d-9b49-70902930ff56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.447576] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963038, 'name': CreateVM_Task, 'duration_secs': 1.207848} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.447906] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.449097] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.449337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.449706] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 819.453769] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-518d9529-e3ce-4ef5-8dda-23afe447b374 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.457019] env[68279]: INFO nova.compute.manager [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Took 0.58 seconds to detach 1 volumes for instance. [ 819.459587] env[68279]: DEBUG nova.compute.manager [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Deleting volume: b6edfa04-38f3-4e20-9c83-faca792e474a {{(pid=68279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 819.466950] env[68279]: DEBUG oslo_vmware.api [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963039, 'name': PowerOnVM_Task, 'duration_secs': 1.090427} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.468575] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.468888] env[68279]: DEBUG nova.compute.manager [None req-e0e07796-d094-4037-bf90-af2d40bb22de tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.469287] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 819.469287] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ab87c4-7c24-decb-e25d-cc9c5b5b001e" [ 819.469287] env[68279]: _type = "Task" [ 819.469287] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.470104] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfd3706-b488-4ff3-a89f-7ac418e6acde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.488213] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ab87c4-7c24-decb-e25d-cc9c5b5b001e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.530833] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963042, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.718593] env[68279]: DEBUG nova.scheduler.client.report [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.770495] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86745e4b-bd6b-44a8-a571-74b2f7ba7704 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.379s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.984039] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ab87c4-7c24-decb-e25d-cc9c5b5b001e, 'name': SearchDatastore_Task, 'duration_secs': 0.05395} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.984348] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.984593] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.984828] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.984999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} 
[ 819.985224] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.985494] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6acbd6fd-f95f-4dae-a4ad-9abc5af8df91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.999020] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.999020] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.999020] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e545265-943d-4782-b106-c5d35a3b1470 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.003678] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 820.003678] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52212705-d868-956a-cb3f-78e147263f9e" [ 820.003678] env[68279]: _type = "Task" [ 820.003678] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.011831] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52212705-d868-956a-cb3f-78e147263f9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.016999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.028691] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618389} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.029237] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.029452] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.029705] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dedc92b-0e80-47ec-bb8e-f5f47510e84a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.035634] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 820.035634] env[68279]: value = "task-2963044" [ 820.035634] env[68279]: _type = "Task" [ 820.035634] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.043363] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.164712] env[68279]: DEBUG nova.network.neutron [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Updated VIF entry in instance network info cache for port 623a6aa7-e812-4756-b223-4c3c8c03b5c9. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.165110] env[68279]: DEBUG nova.network.neutron [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Updating instance_info_cache with network_info: [{"id": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "address": "fa:16:3e:48:ba:60", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap623a6aa7-e8", "ovs_interfaceid": "623a6aa7-e812-4756-b223-4c3c8c03b5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.224468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.296s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.226829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.574s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.228915] env[68279]: INFO nova.compute.claims [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.258025] env[68279]: INFO nova.scheduler.client.report [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted allocations for instance 01a624d3-782d-44cf-8a4e-05a85ac91c64 [ 820.518151] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52212705-d868-956a-cb3f-78e147263f9e, 'name': SearchDatastore_Task, 'duration_secs': 0.024891} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.519376] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cd186a2-2ca4-43e8-91dd-48652ec19df2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.528336] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 820.528336] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ec89f-d4f7-4dbc-e7b0-74061e8e69d7" [ 820.528336] env[68279]: _type = "Task" [ 820.528336] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.537197] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ec89f-d4f7-4dbc-e7b0-74061e8e69d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.545078] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06239} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.545589] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.546937] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546e98b5-061f-4958-8f2c-7170c93b2aa5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.575721] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.576583] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bc599e4-dccd-492c-9c96-6c5210992fd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.601187] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 820.601187] env[68279]: value = "task-2963045" [ 820.601187] env[68279]: _type = "Task" [ 
820.601187] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.611986] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963045, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.667920] env[68279]: DEBUG oslo_concurrency.lockutils [req-b261ba35-ed16-4313-ae0b-9d99b16cdf68 req-b29109a0-7980-4575-bcde-451cc842beee service nova] Releasing lock "refresh_cache-0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.763241] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47917e5c-f924-456a-b4dd-1904eca3ff58 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "01a624d3-782d-44cf-8a4e-05a85ac91c64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.209s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.961072] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.961072] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.044786] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ec89f-d4f7-4dbc-e7b0-74061e8e69d7, 'name': SearchDatastore_Task, 'duration_secs': 0.057133} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.044786] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.044786] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a/0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.044786] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17baa093-acc9-422a-b7fa-45f8607e837f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.052087] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 821.052087] env[68279]: value = "task-2963046" [ 821.052087] env[68279]: _type = "Task" [ 821.052087] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.063330] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.113682] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963045, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.568531] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963046, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.617797] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963045, 'name': ReconfigVM_Task, 'duration_secs': 0.640837} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.620488] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a/4021edd3-346e-44e5-9419-38181cc91c6a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.620488] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d1fd21f-da4c-4edf-b1ba-e108da97a556 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.637362] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 821.637362] env[68279]: value = "task-2963047" [ 821.637362] env[68279]: _type = "Task" [ 821.637362] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.646513] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963047, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.807287] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.808105] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing instance network info cache due to event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 821.808105] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.808105] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.808105] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.896126] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be7cf3c-0869-4976-ad2c-0fdb3be2da6c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.904166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc64e793-c8fa-4357-becb-fc4ce9f2dea8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.938672] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c99c74-148f-4927-8744-19cd06418c23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.948199] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8206a59-9a4d-4c24-97d9-facfcfeb53f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.964159] env[68279]: DEBUG nova.compute.provider_tree [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.063282] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621907} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.063574] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a/0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.064031] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.064126] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24a891ae-bd48-489b-b4f5-77043a5ebc7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.070804] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 822.070804] env[68279]: value = "task-2963048" [ 822.070804] env[68279]: _type = "Task" [ 822.070804] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.080102] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.145660] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963047, 'name': Rename_Task, 'duration_secs': 0.160012} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.145973] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.147043] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b2c2c5f-37e2-4f0c-9440-f0e251e28183 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.153804] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 822.153804] env[68279]: value = "task-2963049" [ 822.153804] env[68279]: _type = "Task" [ 822.153804] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.163422] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963049, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.244936] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.245229] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.245437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.245614] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.245777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 
tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.248056] env[68279]: INFO nova.compute.manager [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Terminating instance [ 822.471314] env[68279]: DEBUG nova.scheduler.client.report [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.585288] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.23481} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.585672] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.586854] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9c1ab0-9a3c-4e1c-bbf3-cfd1e57f6581 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.626593] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a/0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.627480] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26881513-5d2f-4e10-abb2-9d8a877b2348 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.660741] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 822.660741] env[68279]: value = "task-2963050" [ 822.660741] env[68279]: _type = 
"Task" [ 822.660741] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.670492] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963049, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.678231] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963050, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.755036] env[68279]: DEBUG nova.compute.manager [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 822.755036] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.755036] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab31f3f-9608-44c6-b27f-2f0efdc5613e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.761445] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.761445] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-534204a3-7ebb-42a7-bac3-c26bd2b4b19e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.768285] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 822.768285] env[68279]: value = "task-2963051" [ 822.768285] env[68279]: _type = "Task" [ 822.768285] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.773349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.773632] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.777904] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.778961] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updated VIF entry in instance network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.779344] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.978946] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.979508] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 822.982412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.325s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.982667] env[68279]: DEBUG nova.objects.instance [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lazy-loading 'resources' on Instance uuid 43f629d6-bdc3-4345-97ec-26ce2c9d7be7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.168069] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963049, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.174430] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963050, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.279904] env[68279]: INFO nova.compute.manager [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Detaching volume 8b1bb50d-bb4d-4019-9ffa-0979867f8452 [ 823.281695] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.284695] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.284695] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Received event network-changed-21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.284695] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Refreshing instance network info cache due to event network-changed-21e228fc-8c90-47b4-ae9b-5e46f3ad748b. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.284695] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquiring lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.284695] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquired lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.284695] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Refreshing network info cache for port 21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.325856] env[68279]: INFO nova.virt.block_device [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Attempting to driver detach volume 8b1bb50d-bb4d-4019-9ffa-0979867f8452 from mountpoint /dev/sdb [ 823.326132] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 823.326418] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594602', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'name': 'volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6ca13774-f4db-4c9c-9da7-b773ce6cc6e7', 'attached_at': '', 'detached_at': '', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'serial': '8b1bb50d-bb4d-4019-9ffa-0979867f8452'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 823.327769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652b2af2-a38b-4395-a038-2b6558752456 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.358886] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28843fa8-e346-46bc-bdec-b97598036e0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.369527] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eca715-6f46-4744-8598-5190d3873c37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.393209] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc3a878-8383-463f-a243-4dcfc9363e56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.410248] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] The volume has not been displaced from its original location: [datastore2] volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452/volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 823.415530] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 823.415896] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f399713-f843-4751-9078-9132426a90f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.436410] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Waiting for the task: (returnval){ [ 823.436410] env[68279]: value = "task-2963052" [ 823.436410] env[68279]: _type = "Task" [ 823.436410] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.445448] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963052, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.496797] env[68279]: DEBUG nova.compute.utils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 823.499885] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.499885] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.586735] env[68279]: DEBUG nova.policy [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71136053ec324086b94bc03ed7b649bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63e6688a23df4c48af9c2f37a97caeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 823.667904] env[68279]: DEBUG oslo_vmware.api [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963049, 'name': PowerOnVM_Task, 'duration_secs': 1.095272} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.671498] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 823.672386] env[68279]: DEBUG nova.compute.manager [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 823.672811] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30efe350-db6e-4e07-8d58-b13b61b8666c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.687901] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963050, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.779061] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963051, 'name': PowerOffVM_Task, 'duration_secs': 0.784753} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.779492] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.779492] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.782905] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afb0fdb3-ea53-4de6-844f-900a012abc3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.949586] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963052, 'name': ReconfigVM_Task, 'duration_secs': 0.257019} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.951176] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 823.957609] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b45c3178-ebc5-441a-80d2-83ed6bb98308 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.976256] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Waiting for the task: (returnval){ [ 823.976256] env[68279]: value = "task-2963054" [ 823.976256] env[68279]: _type = "Task" [ 823.976256] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.984853] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963054, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.003486] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.062272] env[68279]: DEBUG nova.compute.manager [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received event network-changed-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.062444] env[68279]: DEBUG nova.compute.manager [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing instance network info cache due to event network-changed-81109740-d2ff-4cc1-babf-d587de399274. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 824.062647] env[68279]: DEBUG oslo_concurrency.lockutils [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] Acquiring lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.062848] env[68279]: DEBUG oslo_concurrency.lockutils [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] Acquired lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.062977] env[68279]: DEBUG nova.network.neutron [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing network info cache for port 81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.124518] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Successfully created port: f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.180358] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963050, 'name': ReconfigVM_Task, 'duration_secs': 1.060703} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.180666] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a/0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.181411] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-847fb369-e7cc-4886-805e-3e4c318be8f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.187986] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 824.187986] env[68279]: value = "task-2963055" [ 824.187986] env[68279]: _type = "Task" [ 824.187986] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.189651] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90d68af-34e3-46a4-995d-3b8bdc17067e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.193311] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updated VIF entry in instance network info cache for port 21e228fc-8c90-47b4-ae9b-5e46f3ad748b. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.193643] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updating instance_info_cache with network_info: [{"id": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "address": "fa:16:3e:f7:ae:f0", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21e228fc-8c", "ovs_interfaceid": "21e228fc-8c90-47b4-ae9b-5e46f3ad748b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.211989] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.213431] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592bb76b-baae-4033-92fc-8465f3246537 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.222622] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963055, 'name': Rename_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.252976] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8390a246-5b98-488d-ac5e-3eba03e7f9db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.261458] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973f8800-3c23-4a46-accc-7a349c3a40e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.277788] env[68279]: DEBUG nova.compute.provider_tree [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.294033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.294408] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.294648] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.294858] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.295076] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.297846] env[68279]: INFO nova.compute.manager [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 
tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Terminating instance [ 824.490384] env[68279]: DEBUG oslo_vmware.api [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Task: {'id': task-2963054, 'name': ReconfigVM_Task, 'duration_secs': 0.155424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.490810] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594602', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'name': 'volume-8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6ca13774-f4db-4c9c-9da7-b773ce6cc6e7', 'attached_at': '', 'detached_at': '', 'volume_id': '8b1bb50d-bb4d-4019-9ffa-0979867f8452', 'serial': '8b1bb50d-bb4d-4019-9ffa-0979867f8452'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 824.702417] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Releasing lock "refresh_cache-c62a0d0e-8869-482a-a687-c628b96d6e22" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.702417] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.702417] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing instance network info cache due to event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 824.702417] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.702417] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.702417] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.703513] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963055, 'name': Rename_Task, 'duration_secs': 0.28434} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.704156] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.704456] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13fa72e9-6434-4877-827b-0b2d7a881d14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.711327] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 824.711327] env[68279]: value = "task-2963056" [ 824.711327] env[68279]: _type = "Task" [ 824.711327] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.726431] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963056, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.781304] env[68279]: DEBUG nova.scheduler.client.report [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.806079] env[68279]: DEBUG nova.network.neutron [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updated VIF entry in instance network info cache for port 81109740-d2ff-4cc1-babf-d587de399274. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.806703] env[68279]: DEBUG nova.network.neutron [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [{"id": "81109740-d2ff-4cc1-babf-d587de399274", "address": "fa:16:3e:9f:e4:40", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81109740-d2", "ovs_interfaceid": "81109740-d2ff-4cc1-babf-d587de399274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.808496] env[68279]: DEBUG nova.compute.manager [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 824.809609] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.811140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb1d68-e31a-4ad3-a208-8ed2e5c7acb5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.821028] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.821028] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dccaa73-994e-409f-ab87-6b1c52470d7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.827426] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 824.827426] env[68279]: value = "task-2963057" [ 824.827426] env[68279]: _type = "Task" [ 824.827426] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.836465] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.021605] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f 
tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.050036] env[68279]: DEBUG nova.virt.hardware [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.050511] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca8b8db-4d76-45c2-a327-07e756457b6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.056405] env[68279]: DEBUG nova.objects.instance [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lazy-loading 'flavor' on Instance uuid 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 825.065434] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162df62d-8c7c-44eb-aeb7-8cab24a32b32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.221733] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963056, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.287038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.304s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.289604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.554s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.291599] env[68279]: INFO nova.compute.claims [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.302700] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.302944] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.303147] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleting the datastore file [datastore1] f807e45c-76d8-46a6-a30b-011e7b8df6a4 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.303435] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2d2dcd4-26a6-4e92-bc97-d590cc246e47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.311049] env[68279]: DEBUG oslo_concurrency.lockutils [req-183048d2-7954-4f00-8cf8-0f43ac0cd4f6 req-65b8ece7-afae-4f36-835c-c6b4c41048ca service nova] Releasing lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.312661] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 825.312661] env[68279]: value = "task-2963058" [ 825.312661] env[68279]: _type = "Task" [ 825.312661] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.320938] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.323530] env[68279]: INFO nova.scheduler.client.report [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Deleted allocations for instance 43f629d6-bdc3-4345-97ec-26ce2c9d7be7 [ 825.338840] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963057, 'name': PowerOffVM_Task, 'duration_secs': 0.385229} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.341893] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 825.342261] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 825.342712] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca3c7840-d86e-4921-922b-1221e1b5aa36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.420163] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.420566] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.420896] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleting the datastore file [datastore1] 0daf31be-c547-46ae-aa91-f99e191e1c76 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.421624] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ace435f-630d-4b79-8e8e-df31735ef3e9 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.433122] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 825.433122] env[68279]: value = "task-2963060" [ 825.433122] env[68279]: _type = "Task" [ 825.433122] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.444345] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.458070] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updated VIF entry in instance network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.458070] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.722840] env[68279]: DEBUG oslo_vmware.api [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963056, 'name': PowerOnVM_Task, 'duration_secs': 0.779457} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.723257] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.723375] env[68279]: INFO nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Took 10.11 seconds to spawn the instance on the hypervisor. [ 825.723916] env[68279]: DEBUG nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.724562] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46500169-6b69-4a3a-8c6e-7c8f72b52e8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.826406] env[68279]: DEBUG oslo_vmware.api [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.485745} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.826406] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.826406] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.829711] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.829711] env[68279]: INFO nova.compute.manager [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Took 3.07 seconds to destroy the instance on the hypervisor. [ 825.829711] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.830813] env[68279]: DEBUG nova.compute.manager [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.831168] env[68279]: DEBUG nova.network.neutron [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 825.833970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0e7cb4a4-6e77-46ad-8352-1444f9eefc8d tempest-ServersListShow296Test-1730500829 tempest-ServersListShow296Test-1730500829-project-member] Lock "43f629d6-bdc3-4345-97ec-26ce2c9d7be7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.911s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.944757] env[68279]: DEBUG oslo_vmware.api [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425598} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.944925] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.946026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.946026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.946026] env[68279]: INFO nova.compute.manager [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 1.14 seconds to destroy the instance on the hypervisor. [ 825.946026] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.946026] env[68279]: DEBUG nova.compute.manager [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.946361] env[68279]: DEBUG nova.network.neutron [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 825.959606] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.959873] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received event network-changed-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.961252] env[68279]: DEBUG nova.compute.manager [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing instance network info cache due to event network-changed-81109740-d2ff-4cc1-babf-d587de399274. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 825.961252] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquiring lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.961252] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Acquired lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.961252] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Refreshing network info cache for port 81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.067735] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8f079db5-5def-41b0-80da-50355098df30 tempest-VolumesAssistedSnapshotsTest-1027198884 tempest-VolumesAssistedSnapshotsTest-1027198884-project-admin] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.294s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.244939] env[68279]: INFO nova.compute.manager [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Took 47.20 seconds to build instance. 
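The PowerOnVM_Task entries above (task-2963056: "Waiting for the task ... to complete", "_poll_task ... progress is 0%/66%", "completed successfully") follow the submit-then-poll pattern oslo_vmware uses for vSphere tasks. Below is a minimal, self-contained sketch of that pattern, not the actual oslo_vmware.api implementation; the fetch_task_info callable and TaskFailed exception are hypothetical stand-ins.

```python
# Simplified illustration of the poll-until-done loop seen in the log:
# submit a remote task, then poll its state at a fixed interval until it
# reports success, error, or a timeout is reached.
import time


class TaskFailed(Exception):
    """Raised when the remote task finishes in an error state."""


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a remote task until it succeeds, fails, or times out.

    fetch_task_info(task_id) is assumed (hypothetically) to return a dict
    such as {'state': 'running', 'progress': 66} or {'state': 'success'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Still queued/running: report progress and poll again, mirroring
        # the "_poll_task ... progress is N%" debug lines above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```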
[ 826.288039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.288589] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.352534] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "4021edd3-346e-44e5-9419-38181cc91c6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.353360] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.354717] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.354831] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.355045] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.360160] env[68279]: INFO nova.compute.manager [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Terminating 
instance [ 826.484616] env[68279]: INFO nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Port 81109740-d2ff-4cc1-babf-d587de399274 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 826.484616] env[68279]: DEBUG nova.network.neutron [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.592141] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Successfully updated port: f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.749656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ecb5c33a-1250-45b7-8774-244c909fa0cb tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.596s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.750956] env[68279]: DEBUG nova.network.neutron [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.810287] env[68279]: DEBUG nova.network.neutron [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.864655] env[68279]: DEBUG nova.compute.manager [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.865134] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.865828] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a963f4fc-a07a-40b4-95e6-d5facf133594 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.876861] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.877145] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7d63fe8-33e1-4ab9-b30c-03f299cffac6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.886150] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 826.886150] env[68279]: value = "task-2963062" [ 826.886150] env[68279]: _type = "Task" [ 826.886150] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.898503] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.912806] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54223cc2-0aa4-4893-ab8d-9d31325912d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.920195] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4341c910-b691-4ff7-8c8c-424f47a86f40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.952099] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7040e243-ed41-4193-9fca-97c1ae87a5fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.960206] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a973642e-473f-4b4f-97b3-cff685817d44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.975827] env[68279]: DEBUG nova.compute.provider_tree [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.979302] env[68279]: DEBUG nova.compute.manager [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.979433] env[68279]: DEBUG nova.compute.manager [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing instance network info cache due to event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 826.979651] env[68279]: DEBUG oslo_concurrency.lockutils [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.979931] env[68279]: DEBUG oslo_concurrency.lockutils [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.979991] env[68279]: DEBUG nova.network.neutron [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.984471] env[68279]: DEBUG oslo_concurrency.lockutils [req-eb0246d8-6ec8-4eb3-b023-dab05a2bdf07 req-d65057bf-7ad9-46a4-864d-37c5363fa6e3 service nova] Releasing lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.095465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.095664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.095764] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.101057] env[68279]: DEBUG nova.compute.manager [req-76009ff1-3553-47e9-b728-1ae64a6d65e8 req-4fbfd5c4-a811-412d-9602-38fbb7e8a823 service nova] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Received event network-vif-deleted-25dc4e9c-46b3-42bc-af52-a7cbc468c28a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 827.252724] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.255582] env[68279]: INFO nova.compute.manager [-] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Took 1.42 seconds to deallocate network for instance. 
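The surrounding "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" entries come from oslo.concurrency named locks guarding the network info cache and the resource tracker. The sketch below shows the same pattern with oslo_concurrency.lockutils, assuming oslo.concurrency is available; the lock name format and the do_refresh() body are placeholders taken from the log, not Nova's actual code.

```python
# Minimal sketch of a named critical section with waited/held timing,
# analogous to the "refresh_cache-<uuid>" and "compute_resources" locks
# seen in the log above.
import time

from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, do_refresh):
    lock_name = f"refresh_cache-{instance_uuid}"
    start = time.monotonic()
    with lockutils.lock(lock_name):
        waited = time.monotonic() - start
        print(f'Lock "{lock_name}" acquired :: waited {waited:.3f}s')
        held_start = time.monotonic()
        do_refresh()  # placeholder for the actual cache refresh work
        held = time.monotonic() - held_start
    print(f'Lock "{lock_name}" released :: held {held:.3f}s')
```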
[ 827.312881] env[68279]: INFO nova.compute.manager [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 1.37 seconds to deallocate network for instance. [ 827.399461] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963062, 'name': PowerOffVM_Task, 'duration_secs': 0.322098} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.399574] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.399706] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.400029] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1c12888-203c-46c2-a8d8-0c964347c049 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.420791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "5cbe4915-5b01-4424-96c8-f3225e512c89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.420791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.479918] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.479918] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.479918] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore2] 4021edd3-346e-44e5-9419-38181cc91c6a {{(pid=68279) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.479918] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58730cba-530a-46f8-a2b7-bae3757c56d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.488765] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 827.488765] env[68279]: value = "task-2963064" [ 827.488765] env[68279]: _type = "Task" [ 827.488765] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.497239] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963064, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.503527] env[68279]: ERROR nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [req-f3dc96c9-3a53-4519-8036-5ffcae03208e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f3dc96c9-3a53-4519-8036-5ffcae03208e"}]} [ 827.519854] env[68279]: DEBUG nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 827.536598] env[68279]: DEBUG nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 827.537100] env[68279]: DEBUG nova.compute.provider_tree [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 827.556401] env[68279]: DEBUG nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 827.589277] env[68279]: DEBUG nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 827.598578] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.598824] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.648923] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.762370] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.781475] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.819483] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.894526] env[68279]: DEBUG nova.network.neutron [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.939224] env[68279]: DEBUG nova.network.neutron [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updated VIF entry in instance network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.939224] env[68279]: DEBUG nova.network.neutron [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.004101] env[68279]: DEBUG oslo_vmware.api [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16342} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.007215] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.007464] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.007599] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.007770] env[68279]: INFO nova.compute.manager [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 828.008032] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.008695] env[68279]: DEBUG nova.compute.manager [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.008811] env[68279]: DEBUG nova.network.neutron [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.357683] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e8cd37-8278-4c9d-a313-b5937580368b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.366155] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31f78e6-f73c-4ab0-bc4c-620021fa5a7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.409525] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.409899] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Instance network_info: |[{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 828.411941] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:82:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1c4e041-ced5-433b-a721-e9fa16d159ce', 
'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.421151] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Creating folder: Project (63e6688a23df4c48af9c2f37a97caeb1). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.422029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5b9292-2a1b-4386-b5dd-3be64036ff19 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.427300] env[68279]: DEBUG nova.network.neutron [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.427300] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-338d2b4a-02c5-43e8-a36c-fb84e0a552ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.435680] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26df366d-89c3-47fa-839a-d815d03537dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.444772] env[68279]: DEBUG oslo_concurrency.lockutils [req-0691958e-742d-4bc7-9d4e-bd51683795e6 req-1c44785a-aa0d-43e1-a9ae-755d34be85cb service nova] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.445338] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Created folder: Project (63e6688a23df4c48af9c2f37a97caeb1) in parent group-v594445. [ 828.445413] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Creating folder: Instances. Parent ref: group-v594605. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.446649] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-251e45d7-c81e-40a6-b2f5-58e764576bad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.459226] env[68279]: DEBUG nova.compute.provider_tree [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 828.469141] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Created folder: Instances in parent group-v594605. [ 828.470073] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.470073] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3763645-5a78-4929-98a3-108e72071211] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.470073] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cdd0ca6-ab56-4132-a756-d48588bbbee8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.490613] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.490613] env[68279]: value = "task-2963067" [ 828.490613] env[68279]: _type = "Task" [ 828.490613] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.500335] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963067, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.807161] env[68279]: DEBUG nova.objects.instance [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lazy-loading 'flavor' on Instance uuid f7db383a-648a-4984-ae25-72bc2ccfe369 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.931818] env[68279]: INFO nova.compute.manager [-] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Took 0.92 seconds to deallocate network for instance. [ 829.005930] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963067, 'name': CreateVM_Task, 'duration_secs': 0.439517} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.005930] env[68279]: DEBUG nova.scheduler.client.report [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 83 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 829.005930] env[68279]: DEBUG nova.compute.provider_tree [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 83 to 84 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 829.005930] env[68279]: DEBUG nova.compute.provider_tree [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.009856] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3763645-5a78-4929-98a3-108e72071211] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.010861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.011207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.011640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 829.012125] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-179bb8e9-6139-4499-929f-6010a2eb9cff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.018791] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 829.018791] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ac7081-e663-52d9-9cfe-db848e89986f" [ 829.018791] env[68279]: _type = "Task" [ 829.018791] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.030852] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ac7081-e663-52d9-9cfe-db848e89986f, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.031263] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.031602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.031969] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.034018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.034018] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.034018] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95b748b0-58c7-477d-a187-c9b5f7fa0829 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.041570] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.041883] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.042693] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-273c6735-28af-4000-9165-ec83839d1844 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.052106] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 829.052106] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52118fc9-c752-988f-39be-fc66fa2a5d0d" [ 829.052106] env[68279]: _type = "Task" [ 829.052106] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.057968] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52118fc9-c752-988f-39be-fc66fa2a5d0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.157148] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Received event network-vif-plugged-f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.157148] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.157148] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Lock "e3763645-5a78-4929-98a3-108e72071211-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.157148] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Lock "e3763645-5a78-4929-98a3-108e72071211-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.157148] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 
req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] No waiting events found dispatching network-vif-plugged-f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.157488] env[68279]: WARNING nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Received unexpected event network-vif-plugged-f1c4e041-ced5-433b-a721-e9fa16d159ce for instance with vm_state building and task_state spawning. [ 829.157488] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Received event network-changed-f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.157488] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Refreshing instance network info cache due to event network-changed-f1c4e041-ced5-433b-a721-e9fa16d159ce. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 829.157635] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Acquiring lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.157840] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Acquired lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.157930] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Refreshing network info cache for port f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.224139] env[68279]: DEBUG nova.compute.manager [req-f6b6d913-3274-4ed8-a3cb-2ba93054a230 req-253f350b-34f1-46d9-b10c-6a8e6faeaf55 service nova] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Received event network-vif-deleted-81109740-d2ff-4cc1-babf-d587de399274 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.267974] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.315845] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.316030] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.439596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.512120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.223s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.512614] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.515659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 41.658s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.567038] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52118fc9-c752-988f-39be-fc66fa2a5d0d, 'name': SearchDatastore_Task, 'duration_secs': 0.008618} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.568502] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7575836b-5718-404e-8e00-18a1f729333e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.576310] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 829.576310] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520b1d59-194c-3578-1002-c1b4be4d00f3" [ 829.576310] env[68279]: _type = "Task" [ 829.576310] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.589672] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520b1d59-194c-3578-1002-c1b4be4d00f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.891284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.891567] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.891750] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.891937] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.892132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.894910] env[68279]: INFO nova.compute.manager [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Terminating instance [ 829.926086] env[68279]: DEBUG nova.compute.manager [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.927585] 
env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab7c143-5f5e-4634-8b76-d2786f7ba7d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.988958] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Updated VIF entry in instance network info cache for port f1c4e041-ced5-433b-a721-e9fa16d159ce. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.989502] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.019471] env[68279]: DEBUG nova.compute.utils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.022800] env[68279]: INFO nova.compute.claims [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.027111] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Not allocating networking since 'none' was specified. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 830.087439] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520b1d59-194c-3578-1002-c1b4be4d00f3, 'name': SearchDatastore_Task, 'duration_secs': 0.012442} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.087742] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.088026] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e3763645-5a78-4929-98a3-108e72071211/e3763645-5a78-4929-98a3-108e72071211.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.088286] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbfcf4a6-55f7-4b1d-a5b3-146769dd9983 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.097484] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 830.097484] env[68279]: value = "task-2963068" [ 830.097484] env[68279]: _type = "Task" [ 830.097484] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.110986] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.273407] env[68279]: DEBUG nova.network.neutron [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.401492] env[68279]: DEBUG nova.compute.manager [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 830.401492] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.401647] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7efbf06-c414-4a9e-854e-6b30b38aaef4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.410159] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.410451] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-320f7133-9364-4f98-aeac-f831ab2842e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.419783] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 830.419783] env[68279]: value = "task-2963069" [ 830.419783] env[68279]: _type = "Task" [ 830.419783] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.430606] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2963069, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.444577] env[68279]: INFO nova.compute.manager [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] instance snapshotting [ 830.451369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db00c61-c89a-4b28-b1bd-07ecf8a74879 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.476932] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baf2ea5-f76b-4a9d-a8b4-10122c76166c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.492930] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Releasing lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.493252] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.493802] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing instance network info cache due to event network-changed-ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.493802] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Acquiring lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.493967] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Acquired lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.494275] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Refreshing network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.530541] env[68279]: INFO nova.compute.resource_tracker [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating resource usage from migration 7ecdeeb2-b081-4af5-b57a-62f05991fa32 [ 830.535933] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.611714] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510127} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.613883] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e3763645-5a78-4929-98a3-108e72071211/e3763645-5a78-4929-98a3-108e72071211.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.613883] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.613883] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cea20c8-d85d-4945-8adf-49ed761af78c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.622292] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 830.622292] env[68279]: value = "task-2963070" [ 830.622292] env[68279]: _type = "Task" [ 830.622292] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.631052] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963070, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.929737] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2963069, 'name': PowerOffVM_Task, 'duration_secs': 0.274655} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.930105] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.930839] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.930839] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e786773-4473-4e41-a95e-8113ae0b207b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.990918] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 830.990918] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cc74f23c-479d-4f44-8172-624054aa5914 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.012168] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 831.012168] env[68279]: value = "task-2963072" [ 831.012168] env[68279]: _type = "Task" [ 831.012168] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.025770] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963072, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.033396] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.033396] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.034131] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Deleting the datastore file [datastore2] 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.034131] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee1d25c6-d8fc-44f0-9b60-313cc7643a7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.060366] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for the task: (returnval){ [ 831.060366] env[68279]: value = "task-2963073" [ 831.060366] env[68279]: _type = "Task" [ 831.060366] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.067912] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2963073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.136993] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963070, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070763} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.137494] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.140280] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a320ee2-dc50-49af-9e45-0f099db728b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.167226] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] e3763645-5a78-4929-98a3-108e72071211/e3763645-5a78-4929-98a3-108e72071211.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.172950] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7308448f-8372-4422-b9aa-33cb3965e1d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.198192] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 831.198192] env[68279]: value = "task-2963074" [ 831.198192] env[68279]: _type = "Task" [ 831.198192] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.215981] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963074, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.291086] env[68279]: DEBUG nova.network.neutron [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.309375] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updated VIF entry in instance network info cache for port ad06c5c0-cc93-4b02-968c-9e81681ae50a. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.309615] env[68279]: DEBUG nova.network.neutron [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [{"id": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "address": "fa:16:3e:88:96:e0", "network": {"id": "970a1d74-3b9b-4170-878f-3ee79b936152", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2047471724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fadbbd31a4314d12a378689150d3a24d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06c5c0-cc", "ovs_interfaceid": "ad06c5c0-cc93-4b02-968c-9e81681ae50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.381726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c3c301-ee84-4108-8894-fd8e692345e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.389339] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573f2493-79f5-4c11-8bde-d09f99656578 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.422135] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ad626c-c8a0-4c19-8f54-810507212697 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.432115] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230b6933-de2f-4128-a3bd-410d7fbe3323 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.437256] env[68279]: DEBUG nova.compute.manager [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.437488] env[68279]: DEBUG nova.compute.manager [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing instance network info cache due to event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 831.437684] env[68279]: DEBUG oslo_concurrency.lockutils [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.449322] env[68279]: DEBUG nova.compute.provider_tree [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 831.522615] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963072, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.555943] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.568433] env[68279]: DEBUG oslo_vmware.api [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Task: {'id': task-2963073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136146} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.568702] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.568911] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.569114] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.569293] env[68279]: INFO nova.compute.manager [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Took 1.17 seconds to destroy the instance on the hypervisor. [ 831.569532] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.569721] env[68279]: DEBUG nova.compute.manager [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 831.569818] env[68279]: DEBUG nova.network.neutron [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.595566] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.596097] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.596396] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.596680] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.596953] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.597281] env[68279]: DEBUG nova.virt.hardware [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.598285] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5ad7f1-7182-4dfb-af45-b8f8b7d29877 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.607794] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e0ea40-0311-488d-bd57-eba1701a667d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.622097] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.628445] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Creating folder: Project (622bf2edce6f4c899a8fb819568e61c5). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.629071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "336b7399-b64e-411f-99bc-ba0d292e371a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.629202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.629397] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.629573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.629733] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.631446] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56f574e4-8d40-47c6-8ce3-24347c9899f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.633596] env[68279]: INFO nova.compute.manager [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Terminating instance [ 831.647016] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Created folder: Project (622bf2edce6f4c899a8fb819568e61c5) in parent group-v594445. [ 831.647226] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Creating folder: Instances. Parent ref: group-v594608. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.648083] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-330de123-8b7b-4273-a7b9-2e73ca29341e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.659943] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Created folder: Instances in parent group-v594608. [ 831.659943] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.660127] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.660364] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eec56efa-d707-4a00-a146-ac00ca0e7abf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.678692] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.678692] env[68279]: value = "task-2963077" [ 831.678692] env[68279]: _type = "Task" [ 831.678692] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.688416] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963077, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.706183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.706418] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.711380] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963074, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.793493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.793741] env[68279]: DEBUG nova.compute.manager [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Inject network info {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 831.794015] env[68279]: DEBUG nova.compute.manager [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] network_info to inject: |[{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 831.798806] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfiguring VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 831.799135] env[68279]: DEBUG oslo_concurrency.lockutils [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.799319] env[68279]: DEBUG nova.network.neutron [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing network info cache for port 
152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.800514] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f5c9770-0bf0-4825-9908-4645d3fb4834 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.812314] env[68279]: DEBUG oslo_concurrency.lockutils [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] Releasing lock "refresh_cache-b2e272b3-520a-4ef7-8141-a9d55739d6b9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.812537] env[68279]: DEBUG nova.compute.manager [req-bf8e03c1-1c7f-49aa-a2be-3308b20068e4 req-488786ae-7944-45ce-8be7-c54ec8811576 service nova] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Received event network-vif-deleted-aa6d2c82-4995-4c78-98c0-3d13f4d30137 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.816679] env[68279]: DEBUG oslo_vmware.api [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 831.816679] env[68279]: value = "task-2963078" [ 831.816679] env[68279]: _type = "Task" [ 831.816679] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.824951] env[68279]: DEBUG oslo_vmware.api [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.985726] env[68279]: DEBUG nova.objects.instance [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lazy-loading 'flavor' on Instance uuid f7db383a-648a-4984-ae25-72bc2ccfe369 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 831.990335] env[68279]: ERROR nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [req-9cfdd149-14ab-41f9-b0de-5094e14a2fb6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9cfdd149-14ab-41f9-b0de-5094e14a2fb6"}]} [ 832.019199] env[68279]: DEBUG nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 832.028490] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963072, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.038368] env[68279]: DEBUG nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 832.038601] env[68279]: DEBUG nova.compute.provider_tree [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.052435] env[68279]: DEBUG nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 832.073959] env[68279]: DEBUG nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 832.089368] env[68279]: DEBUG nova.compute.manager [req-6ec2f56a-a0f5-40de-aded-933fa5046ab8 
req-c4523d44-1e23-452d-a138-8d6e34bcaf76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Received event network-vif-deleted-949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.089624] env[68279]: INFO nova.compute.manager [req-6ec2f56a-a0f5-40de-aded-933fa5046ab8 req-c4523d44-1e23-452d-a138-8d6e34bcaf76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Neutron deleted interface 949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d; detaching it from the instance and deleting it from the info cache [ 832.090079] env[68279]: DEBUG nova.network.neutron [req-6ec2f56a-a0f5-40de-aded-933fa5046ab8 req-c4523d44-1e23-452d-a138-8d6e34bcaf76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.140510] env[68279]: DEBUG nova.compute.manager [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 832.140510] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.141798] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b574bb-4ea2-4102-bdd9-f369e1f3b9ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.149186] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.149892] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97fd0272-8691-42f2-96db-887357c89275 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.157148] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 832.157148] env[68279]: value = "task-2963079" [ 832.157148] env[68279]: _type = "Task" [ 832.157148] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.167586] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2963079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.191242] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963077, 'name': CreateVM_Task, 'duration_secs': 0.483107} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.191410] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.191956] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.192317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.192775] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.193172] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f30067-d762-4bc8-9bd9-3b0c41f4ecaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.204068] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 832.204068] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526d7d68-6253-9d7d-ced0-f9a116a20b9b" [ 832.204068] env[68279]: _type = "Task" [ 832.204068] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.210970] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963074, 'name': ReconfigVM_Task, 'duration_secs': 0.742473} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.213852] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfigured VM instance instance-00000038 to attach disk [datastore1] e3763645-5a78-4929-98a3-108e72071211/e3763645-5a78-4929-98a3-108e72071211.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.214874] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7582e93c-5964-4273-b8c2-f7e9666f2f67 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.220262] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526d7d68-6253-9d7d-ced0-f9a116a20b9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.221019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.221380] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.221737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.222025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.222423] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.222749] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ddf3a43-3358-4324-8bea-cfb2631cc986 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.226532] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 832.226532] env[68279]: value = "task-2963080" [ 832.226532] env[68279]: _type = "Task" [ 832.226532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.235378] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.235863] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.237029] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5717a8d-8a80-4732-bdac-c2da23ea5f9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.242205] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963080, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.247816] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 832.247816] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d364b-a694-03a6-3964-650cfde408fd" [ 832.247816] env[68279]: _type = "Task" [ 832.247816] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.255597] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d364b-a694-03a6-3964-650cfde408fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.326447] env[68279]: DEBUG oslo_vmware.api [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963078, 'name': ReconfigVM_Task, 'duration_secs': 0.191808} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.328064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-58e54ff9-db4a-4415-9476-55cb70ddd0d7 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfigured VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 832.501556] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.524355] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963072, 'name': CreateSnapshot_Task, 'duration_secs': 1.083172} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.524620] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 832.525396] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee13cfe8-af76-4915-ba0e-f03c8c4b8d2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.571536] env[68279]: DEBUG nova.network.neutron [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.584757] env[68279]: DEBUG nova.network.neutron [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updated VIF entry in instance network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.585139] env[68279]: DEBUG nova.network.neutron [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.597193] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd2bb456-f2cd-4586-9e3a-258f4bcf6800 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.606877] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d73d1a3-c818-4950-a0c6-8c37b1bb7652 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.648824] env[68279]: DEBUG nova.compute.manager [req-6ec2f56a-a0f5-40de-aded-933fa5046ab8 req-c4523d44-1e23-452d-a138-8d6e34bcaf76 service nova] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Detach interface failed, port_id=949c2fdc-85d6-4e8b-85c2-9ebc4a3d3e6d, reason: Instance 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 832.671237] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2963079, 'name': PowerOffVM_Task, 'duration_secs': 0.277599} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.671237] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.671237] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 832.677050] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70b09f23-1fa4-4b31-8d9e-579bf32c859d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.727348] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bb3618-bbb0-427e-b180-56e2dd8d3d86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.743254] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d1c68e-f055-4035-b20e-c022c2d9e4b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.749316] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963080, 'name': Rename_Task, 'duration_secs': 0.235912} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.752320] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.756725] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6b0f7f1-ecb3-4a3d-bd04-241fde210757 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.785958] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a198ecd-9684-428c-b343-a70487e8b2f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.791845] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d364b-a694-03a6-3964-650cfde408fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009308} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.794808] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 832.794808] env[68279]: value = "task-2963082" [ 832.794808] env[68279]: _type = "Task" [ 832.794808] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.796890] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35de2fea-e7d1-49d7-9704-4f7a0df883ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.804556] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4d8a38-2f22-4d54-b85b-749f70b6a40f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.810494] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 832.810494] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52123c49-80f1-1f27-3a80-355a0b2305cb" [ 832.810494] env[68279]: _type = "Task" [ 832.810494] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.822598] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963082, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.823092] env[68279]: DEBUG nova.compute.provider_tree [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.833501] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52123c49-80f1-1f27-3a80-355a0b2305cb, 'name': SearchDatastore_Task, 'duration_secs': 0.013614} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.835912] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.836691] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.837820] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c079594c-551f-406c-9a33-31519fb971c4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.844577] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 832.844577] env[68279]: value = "task-2963083" [ 832.844577] env[68279]: _type = "Task" [ 832.844577] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.852508] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.045138] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 833.045532] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-404b7da5-df17-4cb5-a6b6-3aa28d7e1bfa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.053718] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 833.053718] env[68279]: value = "task-2963084" [ 833.053718] env[68279]: _type = "Task" [ 833.053718] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.064590] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.077207] env[68279]: INFO nova.compute.manager [-] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Took 1.51 seconds to deallocate network for instance. [ 833.090903] env[68279]: DEBUG oslo_concurrency.lockutils [req-c24d5fdb-3926-47da-b5e2-9cbbf8b8d499 req-2c040336-1df1-4f57-b171-4852080ba029 service nova] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.091415] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.311537] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963082, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.355126] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963083, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.368022] env[68279]: DEBUG nova.scheduler.client.report [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 833.368022] env[68279]: DEBUG nova.compute.provider_tree [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 85 to 86 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 833.368022] env[68279]: DEBUG nova.compute.provider_tree [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.523666] env[68279]: DEBUG nova.network.neutron [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.563607] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.586279] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.666622] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.666881] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.666986] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Deleting the datastore file [datastore1] 336b7399-b64e-411f-99bc-ba0d292e371a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.667283] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95d3762e-9421-4ff1-826e-bec7b63cd66e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.673738] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for the task: (returnval){ [ 833.673738] env[68279]: value = "task-2963085" [ 833.673738] env[68279]: _type = "Task" [ 833.673738] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.681738] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2963085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.810400] env[68279]: DEBUG oslo_vmware.api [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963082, 'name': PowerOnVM_Task, 'duration_secs': 0.887931} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.810792] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.811050] env[68279]: INFO nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Took 8.79 seconds to spawn the instance on the hypervisor. [ 833.811263] env[68279]: DEBUG nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.812256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844352a7-c51c-44c4-860f-d14e0a16574a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.854972] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55862} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.856064] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.856298] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.856539] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db0a4815-bc6e-4bf2-9abc-47d77a8dc481 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.864033] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 833.864033] env[68279]: value = "task-2963086" [ 833.864033] env[68279]: _type = "Task" [ 833.864033] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.872720] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.357s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.872921] env[68279]: INFO nova.compute.manager [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Migrating [ 833.881609] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.884829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 44.377s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.885039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.885222] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 833.885606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.424s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.885740] env[68279]: DEBUG nova.objects.instance [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 833.893976] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f00f56-41f3-4d3d-a024-81c10458c5fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.904062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23dd02b-c6bf-4437-9572-4c804a24e03f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 833.923247] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108a58ab-e726-4c64-aadb-38fab534ed69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.930339] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae96bfb-217e-47fe-bfb8-e30647d1c0fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.962659] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178059MB free_disk=152GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 833.962814] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.072508] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.155183] env[68279]: DEBUG nova.compute.manager [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.155393] env[68279]: DEBUG nova.compute.manager [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing instance network info cache due to event network-changed-152b3aa4-9e41-4813-87ce-2c7cfd51fae1. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.155585] env[68279]: DEBUG oslo_concurrency.lockutils [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] Acquiring lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.184834] env[68279]: DEBUG oslo_vmware.api [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Task: {'id': task-2963085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.433101} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.185125] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.185318] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.185498] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.185672] env[68279]: INFO nova.compute.manager [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Took 2.05 seconds to destroy the instance on the hypervisor. [ 834.185905] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.186115] env[68279]: DEBUG nova.compute.manager [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.186210] env[68279]: DEBUG nova.network.neutron [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.271816] env[68279]: DEBUG nova.network.neutron [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.332272] env[68279]: INFO nova.compute.manager [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Took 47.70 seconds to build instance. [ 834.373559] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077217} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.377021] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.377021] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb1844a-8071-4a87-a2c2-942358a9117f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.397013] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.401258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.401657] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.402187] env[68279]: DEBUG nova.network.neutron [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.403260] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5ce85e6-1b05-4d0b-99b6-94254f2a1462 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.422816] env[68279]: DEBUG oslo_concurrency.lockutils [None req-075e1b82-329c-4ee7-893f-2b19b120508b tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.537s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.430334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.886s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.430334] env[68279]: INFO nova.compute.claims [None 
req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.437400] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 834.437400] env[68279]: value = "task-2963087" [ 834.437400] env[68279]: _type = "Task" [ 834.437400] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.445745] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963087, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.565322] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.777244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.777244] env[68279]: DEBUG nova.compute.manager [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Inject network info {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 834.777244] env[68279]: DEBUG nova.compute.manager [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] network_info to inject: |[{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 834.781651] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfiguring VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 834.783386] env[68279]: DEBUG oslo_concurrency.lockutils [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] Acquired lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.783621] env[68279]: DEBUG nova.network.neutron [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Refreshing network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.785355] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1a4133b-1f19-4849-816b-54c85eabab23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.797663] env[68279]: DEBUG nova.compute.manager [req-ac674323-d1b8-4631-8dd7-3dae924380d8 req-722b1d7e-15e1-428d-b249-f43c3f6c7f6b service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Received event network-vif-deleted-7bb2c9fd-3cb4-4813-8661-d5baac85c2fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.797851] env[68279]: INFO nova.compute.manager [req-ac674323-d1b8-4631-8dd7-3dae924380d8 req-722b1d7e-15e1-428d-b249-f43c3f6c7f6b service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Neutron deleted interface 7bb2c9fd-3cb4-4813-8661-d5baac85c2fd; detaching it from the instance and deleting it from the info cache [ 834.798080] env[68279]: DEBUG nova.network.neutron [req-ac674323-d1b8-4631-8dd7-3dae924380d8 req-722b1d7e-15e1-428d-b249-f43c3f6c7f6b service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.803205] env[68279]: DEBUG nova.network.neutron [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": 
"c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.811698] env[68279]: DEBUG oslo_vmware.api [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 834.811698] env[68279]: value = "task-2963088" [ 834.811698] env[68279]: _type = "Task" [ 834.811698] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.821503] env[68279]: DEBUG oslo_vmware.api [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.833641] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc87fc90-1956-494b-a9bd-e8bff64ff65f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.742s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.948656] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963087, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.067693] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.249266] env[68279]: DEBUG nova.network.neutron [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.306207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.307803] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db1de623-a9d0-4fd8-af77-a3f369d75d0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.323736] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb7d037-f43e-462a-9eeb-a2d0f2f75d07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.343766] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.346388] env[68279]: DEBUG oslo_vmware.api [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963088, 'name': ReconfigVM_Task, 'duration_secs': 0.216429} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.349563] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e15960d5-9606-45b3-9e48-a2066e4698a8 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Reconfigured VM instance to set the machine id {{(pid=68279) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 835.380840] env[68279]: DEBUG nova.compute.manager [req-ac674323-d1b8-4631-8dd7-3dae924380d8 req-722b1d7e-15e1-428d-b249-f43c3f6c7f6b service nova] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Detach interface failed, port_id=7bb2c9fd-3cb4-4813-8661-d5baac85c2fd, reason: Instance 336b7399-b64e-411f-99bc-ba0d292e371a could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 835.450309] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963087, 'name': ReconfigVM_Task, 'duration_secs': 0.651785} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.451058] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Reconfigured VM instance instance-00000039 to attach disk [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.451454] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e25f5a25-7684-4469-8ba5-c2c5814f7f05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.458222] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 835.458222] env[68279]: value = "task-2963089" [ 835.458222] env[68279]: _type = "Task" [ 835.458222] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.471462] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963089, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.563151] env[68279]: DEBUG nova.network.neutron [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updated VIF entry in instance network info cache for port 152b3aa4-9e41-4813-87ce-2c7cfd51fae1. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.563151] env[68279]: DEBUG nova.network.neutron [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [{"id": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "address": "fa:16:3e:33:6f:8c", "network": {"id": "3fc23d4e-9eae-4394-a713-8c89e8907c63", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1177180523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ddf058e7d043439c7088865e742b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152b3aa4-9e", "ovs_interfaceid": "152b3aa4-9e41-4813-87ce-2c7cfd51fae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.572758] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963084, 'name': CloneVM_Task, 'duration_secs': 2.085352} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.577105] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Created linked-clone VM from snapshot [ 835.579744] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63492b5-16a7-4f7d-8bde-d82b221291cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.594756] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Uploading image 06c10dc0-2490-417f-b52a-908a1d661d11 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 835.623541] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 835.624219] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ca714e7e-db4e-4b07-877b-161101ff9576 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.632034] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 835.632034] env[68279]: value = "task-2963090" [ 835.632034] env[68279]: _type = "Task" [ 835.632034] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.649638] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963090, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.751897] env[68279]: INFO nova.compute.manager [-] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Took 1.57 seconds to deallocate network for instance. 
[ 835.869457] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.886507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "f7db383a-648a-4984-ae25-72bc2ccfe369" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.886727] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.886932] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.887131] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.887300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.889581] env[68279]: INFO nova.compute.manager [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Terminating instance [ 835.972306] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963089, 'name': Rename_Task, 'duration_secs': 0.155625} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.972430] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.973028] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-235bf87e-ee8f-4abd-920e-24559e0c3cbc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.978964] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 835.978964] env[68279]: value = "task-2963091" [ 835.978964] env[68279]: _type = "Task" [ 835.978964] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.986720] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.066030] env[68279]: DEBUG oslo_concurrency.lockutils [req-5db86cfe-9ee2-4edc-8484-7808e1de0f28 req-eb2f49f7-a3e3-4bdc-bff5-b72c647769bf service nova] Releasing lock "refresh_cache-f7db383a-648a-4984-ae25-72bc2ccfe369" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.078741] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce5d1f5-305f-4262-9bbd-397df1bd00ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.090280] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324d0578-531a-4503-a287-6867abba41c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.129659] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b23bd87-cc9f-4f65-85a2-a5e31327e683 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.141022] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c5e51f-864b-4434-8f4b-75bfad9a3fed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.146030] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963090, 'name': Destroy_Task, 'duration_secs': 0.36342} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.146688] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Destroyed the VM [ 836.147080] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 836.147128] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-631e1bd9-d7bb-434d-9be9-496f72d41ddb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.156574] env[68279]: DEBUG nova.compute.provider_tree [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.162465] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 836.162465] env[68279]: value = "task-2963092" [ 836.162465] env[68279]: _type = "Task" [ 836.162465] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.170205] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963092, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.263703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.283768] env[68279]: DEBUG nova.compute.manager [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Received event network-changed-f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.283982] env[68279]: DEBUG nova.compute.manager [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Refreshing instance network info cache due to event network-changed-f1c4e041-ced5-433b-a721-e9fa16d159ce. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 836.284238] env[68279]: DEBUG oslo_concurrency.lockutils [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] Acquiring lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.284376] env[68279]: DEBUG oslo_concurrency.lockutils [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] Acquired lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.284541] env[68279]: DEBUG nova.network.neutron [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Refreshing network info cache for port f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.393303] env[68279]: DEBUG nova.compute.manager [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 836.393596] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.394525] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a99aaf1-56ce-4f62-b899-72ab18f503f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.402334] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.402593] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d3bc079-2903-4389-859c-429d35a40a78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.409209] env[68279]: DEBUG oslo_vmware.api [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 836.409209] env[68279]: value = "task-2963093" [ 836.409209] env[68279]: _type = "Task" [ 836.409209] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.488864] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963091, 'name': PowerOnVM_Task} progress is 100%. 
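The network-changed event above triggers a per-instance cache refresh that is serialized by a "refresh_cache-<uuid>" lock (released later at [ 837.546030]). A rough sketch of that serialize-by-instance pattern, with refresh_port_info as a hypothetical placeholder for the Neutron query:

    import threading
    from collections import defaultdict

    _refresh_locks = defaultdict(threading.Lock)
    _nw_info_cache = {}

    def handle_network_changed(instance_uuid, port_id, refresh_port_info):
        # Serialize cache refreshes per instance, like the
        # "refresh_cache-<uuid>" locks in the log above.
        with _refresh_locks[instance_uuid]:
            _nw_info_cache[instance_uuid] = refresh_port_info(port_id)
        return _nw_info_cache[instance_uuid]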
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.660471] env[68279]: DEBUG nova.scheduler.client.report [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.675845] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963092, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.825889] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478ede1f-73a5-41ad-bf7b-1110963ac4f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.851924] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 836.918949] env[68279]: DEBUG oslo_vmware.api [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963093, 'name': PowerOffVM_Task, 'duration_secs': 0.265714} completed successfully. 
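The inventory payload above carries, per resource class, total, reserved, allocation_ratio and unit bounds. Under Placement's usual capacity formula, schedulable capacity is (total - reserved) * allocation_ratio; a quick check against the values in the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0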
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.921454] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.921795] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.921905] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e5efd80-dd1d-4cc7-a1af-60992cb67844 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.985683] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.985897] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.986092] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Deleting the datastore file [datastore2] f7db383a-648a-4984-ae25-72bc2ccfe369 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.986684] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1298d00f-bb79-423f-a99f-c99edb8dce5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.991048] env[68279]: DEBUG oslo_vmware.api [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963091, 'name': PowerOnVM_Task, 'duration_secs': 0.510838} completed successfully. 
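Lines [ 836.921454] through [ 836.986684] show the vmwareapi destroy ordering: power off, unregister the VM, then delete its datastore directory. A condensed sketch of that ordering (the three callables are hypothetical stand-ins for the corresponding vSphere calls):

    def destroy_vm(power_off, unregister, delete_datastore_dir):
        # Order matters: the files can only be removed once the VM is
        # powered off and unregistered from the inventory.
        power_off()
        unregister()
        delete_datastore_dir()   # e.g. "[datastore2] f7db383a-..." in the log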
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.991591] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.991820] env[68279]: INFO nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Took 5.44 seconds to spawn the instance on the hypervisor. [ 836.992010] env[68279]: DEBUG nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.992739] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b9cc47-cb0d-48fe-8253-32a5eb2a7424 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.995962] env[68279]: DEBUG oslo_vmware.api [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for the task: (returnval){ [ 836.995962] env[68279]: value = "task-2963095" [ 836.995962] env[68279]: _type = "Task" [ 836.995962] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.007410] env[68279]: DEBUG oslo_vmware.api [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.040528] env[68279]: DEBUG nova.network.neutron [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Updated VIF entry in instance network info cache for port f1c4e041-ced5-433b-a721-e9fa16d159ce. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.040974] env[68279]: DEBUG nova.network.neutron [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.166734] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.167466] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Start building networks asynchronously for instance. 
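The instance_info_cache entry at [ 837.040974] is a list of VIFs, each carrying a network with subnets, fixed IPs and optional floating IPs. A small sketch of walking that structure to collect addresses (network_info is assumed to be the same list shown in the log):

    def collect_addresses(network_info):
        # Walk VIF -> network -> subnets -> ips, like the cache entry above.
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    # For the entry above this yields (['192.168.128.8'], ['10.180.180.243']).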
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.174252] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.600s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.174252] env[68279]: DEBUG nova.objects.instance [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lazy-loading 'resources' on Instance uuid 6b778e98-12c2-42a5-a772-06ea32d090b8 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.180483] env[68279]: DEBUG oslo_vmware.api [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963092, 'name': RemoveSnapshot_Task, 'duration_secs': 0.647962} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.180722] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 837.230653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.230923] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.357695] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.358417] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-548ea9a1-ce37-4d08-860b-25734d6c9a87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.365765] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 837.365765] env[68279]: value = "task-2963096" [ 837.365765] env[68279]: _type = "Task" [ 
837.365765] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.373846] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.510621] env[68279]: DEBUG oslo_vmware.api [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Task: {'id': task-2963095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253452} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.512440] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.512634] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.512813] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.513015] env[68279]: INFO nova.compute.manager [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Took 1.12 seconds to destroy the instance on the hypervisor. [ 837.513273] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.513863] env[68279]: INFO nova.compute.manager [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Took 49.80 seconds to build instance. 
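The loopingcall wait at [ 837.513273] wraps network deallocation in a retry helper so a transient Neutron failure does not leave the teardown half-done. A generic sketch of that retry-until-success idea (deallocate is a hypothetical callable; the real code uses oslo.service's looping-call machinery):

    import time

    def deallocate_with_retries(deallocate, attempts=3, delay=1.0):
        # Retry the deallocation a bounded number of times before giving up.
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)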
[ 837.515131] env[68279]: DEBUG nova.compute.manager [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.515218] env[68279]: DEBUG nova.network.neutron [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.546030] env[68279]: DEBUG oslo_concurrency.lockutils [req-eaa4ccb2-922e-415a-a36b-810750cf3f3c req-4e310765-da43-440b-bec0-fdf6578adc63 service nova] Releasing lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.677664] env[68279]: DEBUG nova.compute.utils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 837.683620] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.683620] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.687106] env[68279]: WARNING nova.compute.manager [None req-bdbb13d6-59d5-4069-8f2f-ee15c77fefb2 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Image not found during snapshot: nova.exception.ImageNotFound: Image 06c10dc0-2490-417f-b52a-908a1d661d11 could not be found. [ 837.752083] env[68279]: DEBUG nova.policy [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c483580241842d98269131d55f317e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9d27076ab7348bb9ca331f4ff68e46f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.876239] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963096, 'name': PowerOffVM_Task, 'duration_secs': 0.355243} completed successfully. 
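The policy line at [ 837.752083] records a check of network:attach_external_network against the request credentials; it fails because the caller only holds the member and reader roles, which do not satisfy the rule. A toy role-based check in the same spirit (the rule table here is illustrative, not Nova's actual policy configuration):

    RULES = {'network:attach_external_network': {'admin'}}   # illustrative only

    def authorize(rule, credentials):
        required = RULES.get(rule, set())
        return bool(required & set(credentials.get('roles', [])))

    creds = {'roles': ['member', 'reader'], 'project_id': 'b9d27076ab7348bb9ca331f4ff68e46f'}
    print(authorize('network:attach_external_network', creds))   # False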
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.877032] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.877032] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 838.020371] env[68279]: DEBUG oslo_concurrency.lockutils [None req-041ac58f-38c5-4530-957b-f5dc4fef70b8 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.588s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.184039] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.195537] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Successfully created port: a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.289356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.289911] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.293017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
838.293017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.293017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.295222] env[68279]: INFO nova.compute.manager [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Terminating instance [ 838.387208] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.387735] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.387735] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.387875] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.387938] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.388070] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.388520] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.388614] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.388727] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.388952] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.389156] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.394758] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86169254-9170-4635-b82e-4ecd63dccc6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.397816] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf01549-0877-4f1d-8a45-4c59901ef47b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.415295] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaed049b-2a90-4d0a-bbc3-374f8549cc06 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.418531] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 838.418531] env[68279]: value = "task-2963097" [ 838.418531] env[68279]: _type = "Task" [ 838.418531] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.430326] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963097, 'name': ReconfigVM_Task} progress is 6%. 
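The hardware.py lines above enumerate candidate CPU topologies whose sockets * cores * threads equals the flavor's vCPU count and respects the (very large default) limits, then sort them by preference; with one vCPU the only candidate is 1 socket, 1 core, 1 thread. A small sketch of that enumeration (a simplification of the real constraint handling):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) triple whose product is exactly vcpus.
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log above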
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.455998] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9683123a-b943-4d45-9291-a069070d7245 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.464268] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4ba1f3-3af6-4046-8370-a47e3b112b3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.480350] env[68279]: DEBUG nova.compute.provider_tree [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.491284] env[68279]: DEBUG nova.network.neutron [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.527393] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.535015] env[68279]: DEBUG nova.compute.manager [req-5daa0481-ff2c-478e-84af-33f3ead74c02 req-3b376376-edf1-4eb2-bb45-608320de89f2 service nova] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Received event network-vif-deleted-152b3aa4-9e41-4813-87ce-2c7cfd51fae1 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 838.811302] env[68279]: DEBUG nova.compute.manager [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.811302] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.811302] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93f6835-17d3-4ee9-bbbe-07e5088f90da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.813439] env[68279]: INFO nova.compute.manager [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Rebuilding instance [ 838.825346] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.825346] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93efc755-766f-452c-86ae-51bdaf618897 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.833579] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 838.833579] env[68279]: value = "task-2963098" [ 838.833579] env[68279]: _type = "Task" [ 838.833579] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.848464] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.866897] env[68279]: DEBUG nova.compute.manager [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.867795] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f592095c-aa4f-4c03-81f7-f23de5e92022 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.930280] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963097, 'name': ReconfigVM_Task, 'duration_secs': 0.271858} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.930595] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 838.983366] env[68279]: DEBUG nova.scheduler.client.report [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.994281] env[68279]: INFO nova.compute.manager [-] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Took 1.48 seconds to deallocate network for instance. [ 839.050596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.200630] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 839.221843] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 839.222094] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.222253] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 839.222440] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.222589] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 839.222745] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 839.223191] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 839.223191] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 839.223371] env[68279]: DEBUG 
nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 839.223601] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 839.223698] env[68279]: DEBUG nova.virt.hardware [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 839.224588] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35a9c3d-b547-494e-ac36-78ca6f87bf54 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.233371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493ab355-4691-47a1-b2d0-b0248de258d7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.345549] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963098, 'name': PowerOffVM_Task, 'duration_secs': 0.229594} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.345549] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.345725] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.345895] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e2ca9b2-1b11-4988-afc3-257f07f3218f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.416568] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.417114] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.417114] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleting the datastore file [datastore2] 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.417369] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee3b59c3-cd9f-4204-8c15-221569fa644b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.424689] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 839.424689] env[68279]: value = "task-2963100" [ 839.424689] env[68279]: _type = "Task" [ 839.424689] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.433128] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963100, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.437210] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:45:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1a90e579-85e0-4e3b-aa60-125e40db2a15',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1064338248',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 839.437435] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.437593] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 839.437788] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.437939] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 839.438096] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 839.438300] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 839.438459] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 839.438624] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible 
topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 839.438787] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 839.439028] env[68279]: DEBUG nova.virt.hardware [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 839.444313] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 839.444586] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c4c6d8c-097f-49a5-82df-17486d9ba77e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.463187] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 839.463187] env[68279]: value = "task-2963101" [ 839.463187] env[68279]: _type = "Task" [ 839.463187] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.471398] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963101, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.489600] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.316s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.492217] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.686s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.493803] env[68279]: INFO nova.compute.claims [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.500729] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.509509] env[68279]: INFO nova.scheduler.client.report [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Deleted allocations for instance 6b778e98-12c2-42a5-a772-06ea32d090b8 [ 839.884343] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.884343] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88268b8d-6cf4-436a-abca-e5614d0b5ae2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.894027] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 839.894027] env[68279]: value = "task-2963102" [ 839.894027] env[68279]: _type = "Task" [ 839.894027] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.905613] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963102, 'name': PowerOffVM_Task} progress is 0%. 
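The lockutils lines above record both how long a caller waited to acquire the compute_resources lock and how long it was then held (one request waited 42.686s, another held it for 2.316s); long waits usually point at resource-tracker contention. A minimal timing wrapper in the same spirit:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str):
        # Report wait and hold times, similar to what lockutils logs above.
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}": waited {waited:.3f}s, held {held:.3f}s')

    # Usage: with timed_lock(threading.Lock(), "compute_resources"): ...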
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.938057] env[68279]: DEBUG oslo_vmware.api [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148087} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.938057] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.938338] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.938385] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.938537] env[68279]: INFO nova.compute.manager [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 839.938777] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.939028] env[68279]: DEBUG nova.compute.manager [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.939198] env[68279]: DEBUG nova.network.neutron [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.974273] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963101, 'name': ReconfigVM_Task, 'duration_secs': 0.197369} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.974355] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 839.975226] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26175316-0e7e-4b05-b5ad-f8e8446d1dce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.010060] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.013149] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5e1bcc0-05b5-4d1b-8513-505ad5586738 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.035302] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e215f851-4046-49c6-ba5f-e443ba32ef99 tempest-SecurityGroupsTestJSON-869805828 tempest-SecurityGroupsTestJSON-869805828-project-member] Lock "6b778e98-12c2-42a5-a772-06ea32d090b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.926s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.038918] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 840.038918] env[68279]: value = "task-2963103" [ 840.038918] env[68279]: _type = "Task" [ 840.038918] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.050805] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963103, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.255537] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Successfully updated port: a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.315236] env[68279]: DEBUG nova.compute.manager [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.315236] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.315236] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.315236] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.315236] env[68279]: DEBUG nova.compute.manager [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] No waiting events found dispatching network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.315236] env[68279]: WARNING nova.compute.manager [req-9b5c0c1d-3a96-4329-be69-b40871380c64 req-a856d8aa-5bd7-4416-a1fa-7ea50e4c3567 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received unexpected event network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c for instance with vm_state building and task_state spawning. [ 840.405936] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963102, 'name': PowerOffVM_Task, 'duration_secs': 0.197403} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.406292] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.407014] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 840.407982] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3687eb06-7a50-4009-ae14-49ca23f108f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.415124] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 840.415371] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3c0e2ca-9d65-4fad-8fea-04b3475f1318 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.442937] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 840.442937] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 840.443232] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Deleting the datastore file [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 840.443684] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ed175ee-0b93-4ada-9193-655e36c38a18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.455100] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 840.455100] env[68279]: value = "task-2963105" [ 840.455100] env[68279]: _type = "Task" [ 840.455100] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.464241] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963105, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.552254] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.702734] env[68279]: DEBUG nova.compute.manager [req-3c694c81-012a-44a2-ba7e-e1b07571d803 req-4f0018fa-8878-4d93-b4ad-17f0e567d32c service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Received event network-vif-deleted-623a6aa7-e812-4756-b223-4c3c8c03b5c9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.703045] env[68279]: INFO nova.compute.manager [req-3c694c81-012a-44a2-ba7e-e1b07571d803 req-4f0018fa-8878-4d93-b4ad-17f0e567d32c service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Neutron deleted interface 623a6aa7-e812-4756-b223-4c3c8c03b5c9; detaching it from the instance and deleting it from the info cache [ 840.703208] env[68279]: DEBUG nova.network.neutron [req-3c694c81-012a-44a2-ba7e-e1b07571d803 req-4f0018fa-8878-4d93-b4ad-17f0e567d32c service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.759889] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.760123] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.760288] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.970093] env[68279]: DEBUG nova.network.neutron [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.971848] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095484} completed 
successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.975211] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.975211] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.975211] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.053200] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963103, 'name': ReconfigVM_Task, 'duration_secs': 0.616142} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.053494] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.053757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 841.089865] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78331130-99b5-44a8-be52-8a021ca24f40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.098506] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25242d3-dba6-4763-9a12-f46fd4faeae0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.144020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99852b58-bf70-4e59-ba2b-b8d7688a3148 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.152366] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f6ccf3-8430-414e-a028-54df78223fef {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.170165] env[68279]: DEBUG nova.compute.provider_tree [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.205904] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1880bca1-2d98-4efa-af60-00e7407261ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.216027] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c3ea21-a68f-42f6-8bb0-48d6b920c963 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.252964] env[68279]: DEBUG nova.compute.manager [req-3c694c81-012a-44a2-ba7e-e1b07571d803 req-4f0018fa-8878-4d93-b4ad-17f0e567d32c service nova] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Detach interface failed, port_id=623a6aa7-e812-4756-b223-4c3c8c03b5c9, reason: Instance 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 841.305586] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.473353] env[68279]: INFO nova.compute.manager [-] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Took 1.53 seconds to deallocate network for instance. 
[ 841.500635] env[68279]: DEBUG nova.network.neutron [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.566735] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811e107c-60a8-4130-adf6-0064c51868b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.587843] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95dda86-52b0-41bd-afba-095361bbb73f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.608304] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 841.694318] env[68279]: ERROR nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [req-9e2acbab-2972-40fb-91c4-f1fde06dd369] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9e2acbab-2972-40fb-91c4-f1fde06dd369"}]} [ 841.713968] env[68279]: DEBUG nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 841.737472] env[68279]: DEBUG nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 841.737691] env[68279]: DEBUG nova.compute.provider_tree [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.751518] env[68279]: DEBUG nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 841.771905] env[68279]: DEBUG nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 841.980996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.007217] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.008932] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance network_info: |[{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.008932] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:3b:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a047ea62-0c74-4967-820e-75553a4d8d7c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.016339] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating folder: Project (b9d27076ab7348bb9ca331f4ff68e46f). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.019175] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8654e53-7beb-4efd-ac73-9b2a146f5fd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.032395] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 842.032632] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.032795] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 842.032976] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.033140] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 842.033289] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 842.033710] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 842.033895] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 
tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 842.034262] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 842.034262] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 842.034576] env[68279]: DEBUG nova.virt.hardware [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 842.035368] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c519e15-21a6-4167-8cbf-dd6d9834bdf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.042878] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created folder: Project (b9d27076ab7348bb9ca331f4ff68e46f) in parent group-v594445. [ 842.042878] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating folder: Instances. Parent ref: group-v594613. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.042878] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75c8cbdf-088e-4637-b3fc-5e6a3c09e71b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.047413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a73075-dd56-41b7-a75e-b93a541bbe62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.055587] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created folder: Instances in parent group-v594613. [ 842.055828] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.056532] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.056854] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b06cf2b7-4e4d-4194-bf25-3cb27f54b6ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.084449] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.090436] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.091273] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.091568] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-770a2096-2f92-48ef-b4b5-347edf2f5d8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.108041] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.108041] env[68279]: value = "task-2963108" [ 842.108041] env[68279]: _type = "Task" [ 842.108041] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.112259] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.112259] env[68279]: value = "task-2963109" [ 842.112259] env[68279]: _type = "Task" [ 842.112259] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.121480] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963108, 'name': CreateVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.127265] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963109, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.178321] env[68279]: DEBUG nova.network.neutron [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Port 14d0066b-e387-4f2f-a12a-c40206f0b1d0 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 842.382188] env[68279]: DEBUG nova.compute.manager [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.382380] env[68279]: DEBUG nova.compute.manager [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing instance network info cache due to event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.382558] env[68279]: DEBUG oslo_concurrency.lockutils [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.382700] env[68279]: DEBUG oslo_concurrency.lockutils [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.382860] env[68279]: DEBUG nova.network.neutron [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.394057] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403a83c2-1f5f-43eb-b72e-b213db4deafd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.403149] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8930e60-59a7-4299-99bc-33dd3c40e43d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.438135] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf7c771-9f9c-422f-b6eb-06b72d4fd884 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.445963] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75cf6a9-5855-4d42-91f9-7bf00559b21e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.460465] env[68279]: DEBUG nova.compute.provider_tree [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] 
Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.623962] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963109, 'name': CreateVM_Task, 'duration_secs': 0.359917} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.624217] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963108, 'name': CreateVM_Task, 'duration_secs': 0.388948} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.624315] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.624454] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.624876] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.625060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.625395] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.626013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.626201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.626487] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.626731] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee85577-5f87-4820-ae12-99afced0dd41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.628345] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c687aa8c-f5cd-4822-b436-5e155cf8473e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.632569] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 842.632569] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b8f4ba-8a20-90cd-c459-409291ec5c6b" [ 842.632569] env[68279]: _type = "Task" [ 842.632569] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.633825] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 842.633825] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528f11e5-3258-9491-b61d-46c27239d4b8" [ 842.633825] env[68279]: _type = "Task" [ 842.633825] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.648342] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b8f4ba-8a20-90cd-c459-409291ec5c6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009574} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.648754] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.649025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.649248] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.649392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.649568] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.649823] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528f11e5-3258-9491-b61d-46c27239d4b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.650014] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff0ca4e9-85d8-43f9-b86f-97045c0e35aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.652009] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.652009] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.652201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.652347] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.652517] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.652741] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d46d9183-061d-4d72-bdab-1c7ee5098de1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.662292] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.662504] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.664764] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.664764] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.664764] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea555bb4-5f24-4276-87f3-6d81b46ac095 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.668302] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc7a058-2847-45a6-8a8f-f537412dd646 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.672139] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 842.672139] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522f9f38-e105-a64b-d797-4637dc7c116c" [ 842.672139] env[68279]: _type = "Task" [ 842.672139] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.673733] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 842.673733] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5286837a-0a98-2692-2b01-b7f61122df21" [ 842.673733] env[68279]: _type = "Task" [ 842.673733] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.688829] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522f9f38-e105-a64b-d797-4637dc7c116c, 'name': SearchDatastore_Task, 'duration_secs': 0.00894} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.689892] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-209e6c30-0bc0-40bc-bc73-5fdc79371cb1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.695808] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5286837a-0a98-2692-2b01-b7f61122df21, 'name': SearchDatastore_Task, 'duration_secs': 0.008287} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.696884] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-025fe62c-cdcb-48eb-95d2-7e798010ffa8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.700667] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 842.700667] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480c9b-9434-99bd-4ac1-b19c5fdce66b" [ 842.700667] env[68279]: _type = "Task" [ 842.700667] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.705265] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 842.705265] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526625dc-f373-2dc2-b847-a60b9b7c6c3c" [ 842.705265] env[68279]: _type = "Task" [ 842.705265] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.712978] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480c9b-9434-99bd-4ac1-b19c5fdce66b, 'name': SearchDatastore_Task, 'duration_secs': 0.008542} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.713521] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.713775] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.714021] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d2d3e30-aaee-486c-b558-358cd0a8935d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.720604] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526625dc-f373-2dc2-b847-a60b9b7c6c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.008113} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.721193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.721469] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.721729] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4772a98f-436c-4252-87ae-4ee4b72a5c7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.725475] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 842.725475] env[68279]: value = "task-2963110" [ 842.725475] env[68279]: _type = "Task" [ 842.725475] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.730246] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 842.730246] env[68279]: value = "task-2963111" [ 842.730246] env[68279]: _type = "Task" [ 842.730246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.738441] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.743501] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963111, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.000439] env[68279]: DEBUG nova.scheduler.client.report [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 843.000765] env[68279]: DEBUG nova.compute.provider_tree [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 87 to 88 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 843.000975] env[68279]: DEBUG nova.compute.provider_tree [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.141466] env[68279]: DEBUG nova.network.neutron [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updated VIF entry in instance network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.141874] env[68279]: DEBUG nova.network.neutron [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.212551] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "665d932d-1068-4bb2-835c-2184a80753d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.213026] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.213103] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.239561] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963110, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.245916] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963111, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.509435] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.510014] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.512575] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.258s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.512865] env[68279]: DEBUG nova.objects.instance [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lazy-loading 'resources' on Instance uuid 7858163d-8e68-4565-b1e0-ecd2e9be350d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.644651] env[68279]: DEBUG oslo_concurrency.lockutils [req-03ceff50-c3ae-480f-a02c-8aa36c91d6bd req-7a7c167f-ba4f-4e02-8091-48dbc9129a6b service nova] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.738276] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730462} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.738978] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.739217] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.739461] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5dd68cd-5c36-4122-8f89-ae838596644d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.744577] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58834} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.745171] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.745373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.745601] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbe28fab-dc35-4d42-ae2c-0cf2f3146efb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.749685] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 843.749685] env[68279]: value = "task-2963112" [ 843.749685] env[68279]: _type = "Task" [ 843.749685] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.753937] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 843.753937] env[68279]: value = "task-2963113" [ 843.753937] env[68279]: _type = "Task" [ 843.753937] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.760068] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963112, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.764657] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.019341] env[68279]: DEBUG nova.compute.utils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.020837] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.020996] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.072163] env[68279]: DEBUG nova.policy [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6dcff6c11546f9b0907917a2463755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbad607de614a809c51668c2ac0d012', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.262642] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963112, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068019} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.263257] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.264021] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ae3c0b-c525-4129-96ed-f84638985b24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.270848] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062443} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.271548] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.272288] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b139367d-d762-41db-b110-19f6557bdbc5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.292042] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.292974] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.293184] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.293335] env[68279]: DEBUG nova.network.neutron [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.296921] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-81f128d9-d689-480f-80af-62d95fafdf8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.328928] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.330433] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acdf614a-452e-440e-8dba-72e41961103c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.348788] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 844.348788] env[68279]: value = "task-2963114" [ 844.348788] env[68279]: _type = "Task" [ 844.348788] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.349156] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Successfully created port: c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.356230] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 844.356230] env[68279]: value = "task-2963115" [ 844.356230] env[68279]: _type = "Task" [ 844.356230] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.363280] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963114, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.371196] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.524443] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.638325] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c665cd82-b0ce-44c1-82a7-872eec83bcc4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.646492] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918843f5-4d86-4ec7-b764-d8116084d38f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.690030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92973265-c408-4a9e-90ae-b5999043776b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.698958] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e055474f-9591-4ccc-80d2-0de91d28118c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.713095] env[68279]: DEBUG nova.compute.provider_tree [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.746446] env[68279]: DEBUG nova.network.neutron [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.862315] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 
tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963114, 'name': ReconfigVM_Task, 'duration_secs': 0.329282} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.865314] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Reconfigured VM instance instance-0000003a to attach disk [datastore1] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.866086] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46040472-b771-4b3e-8598-50c3bdda85c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.872707] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963115, 'name': ReconfigVM_Task, 'duration_secs': 0.272671} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.873974] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Reconfigured VM instance instance-00000039 to attach disk [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c/b45f310f-e614-47db-9f6e-f35dd481137c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.874688] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 844.874688] env[68279]: value = "task-2963116" [ 844.874688] env[68279]: _type = "Task" [ 844.874688] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.874874] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0264045c-01e8-419c-8861-5ae92b436c85 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.883793] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 844.883793] env[68279]: value = "task-2963117" [ 844.883793] env[68279]: _type = "Task" [ 844.883793] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.887922] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963116, 'name': Rename_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.896627] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963117, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.244910] env[68279]: DEBUG nova.scheduler.client.report [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 845.245222] env[68279]: DEBUG nova.compute.provider_tree [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 88 to 89 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 845.245415] env[68279]: DEBUG nova.compute.provider_tree [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 845.249039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.387190] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963116, 'name': Rename_Task, 'duration_secs': 0.183595} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.387483] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.387772] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48ee545d-83c3-48ca-85ed-99c724975fe5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.397875] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963117, 'name': Rename_Task, 'duration_secs': 0.172621} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.398974] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.399287] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 845.399287] env[68279]: value = "task-2963118" [ 845.399287] env[68279]: _type = "Task" [ 845.399287] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.399459] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33e3097f-e9fa-46d9-b47a-4c4130fb8c40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.408552] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963118, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.409632] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 845.409632] env[68279]: value = "task-2963119" [ 845.409632] env[68279]: _type = "Task" [ 845.409632] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.416733] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963119, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.539695] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.566296] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.566569] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.566771] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.566995] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.567191] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.567380] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.567636] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.567823] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 
tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.568017] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.568273] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.568567] env[68279]: DEBUG nova.virt.hardware [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.570028] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f7a092-6c13-4b52-b1f4-c7955a3b1348 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.578756] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e75bc1-9f43-4032-bff3-21657b75f214 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.755148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.242s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.760188] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.875s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.760466] env[68279]: DEBUG nova.objects.instance [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lazy-loading 'resources' on Instance uuid ed86ef15-1941-40c5-8178-344a7b401b58 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.776985] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d436220d-4b26-4c7d-aa34-9f820de5013c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.782204] env[68279]: INFO nova.scheduler.client.report [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted allocations for instance 7858163d-8e68-4565-b1e0-ecd2e9be350d [ 845.801830] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-3bd0a246-169a-4ec3-b66a-05f0ede1d168 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.809690] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 845.840645] env[68279]: DEBUG nova.compute.manager [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Received event network-vif-plugged-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.841058] env[68279]: DEBUG oslo_concurrency.lockutils [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] Acquiring lock "67466e30-5944-490c-a89b-2d32c59525be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.841421] env[68279]: DEBUG oslo_concurrency.lockutils [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] Lock "67466e30-5944-490c-a89b-2d32c59525be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.841764] env[68279]: DEBUG oslo_concurrency.lockutils [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] Lock "67466e30-5944-490c-a89b-2d32c59525be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.842042] env[68279]: DEBUG nova.compute.manager [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] No waiting events found dispatching network-vif-plugged-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 845.842788] env[68279]: WARNING nova.compute.manager [req-1a18bda3-be3a-4d69-b53c-9362444f5e1a req-0ce25761-292c-45c6-9937-7924966b8106 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Received unexpected event network-vif-plugged-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 for instance with vm_state building and task_state spawning. [ 845.888739] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Successfully updated port: c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.910236] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963118, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.918733] env[68279]: DEBUG oslo_vmware.api [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963119, 'name': PowerOnVM_Task, 'duration_secs': 0.442506} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.919067] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.919288] env[68279]: DEBUG nova.compute.manager [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.920033] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b41bd2-2824-4b37-98b3-862ea48e7c9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.310800] env[68279]: DEBUG oslo_concurrency.lockutils [None req-300d219d-db8b-471c-ba36-dde386a9f90e tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "7858163d-8e68-4565-b1e0-ecd2e9be350d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.762s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.316637] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.316937] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2968e46-6481-45ee-9b6c-cf42c9a1eaed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.326628] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 846.326628] env[68279]: value = "task-2963120" [ 846.326628] env[68279]: _type = "Task" [ 846.326628] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.334758] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963120, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.391830] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.391972] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.392131] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.414455] env[68279]: DEBUG oslo_vmware.api [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963118, 'name': PowerOnVM_Task, 'duration_secs': 0.778973} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.414892] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.414967] env[68279]: INFO nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Took 7.21 seconds to spawn the instance on the hypervisor. 
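Every vCenter call in the trace above follows the same pattern: the SOAP method returns a task handle immediately (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), and the driver then polls that handle, logging "Waiting for the task ... to complete", "progress is N%", and finally "completed successfully". The snippet below is only a minimal sketch of such a polling loop for readers unfamiliar with the pattern; wait_for_task, poll_once and TaskTimeoutError here are illustrative stand-ins, not the actual oslo.vmware API (the real helpers live in oslo_vmware/api.py, as the source references in the log show).

import time


class TaskTimeoutError(Exception):
    """Raised when the polled task does not finish in time."""


def wait_for_task(poll_once, interval=0.5, timeout=300.0):
    # `poll_once` is assumed to return a dict such as
    # {'state': 'running', 'progress': 51} or {'state': 'success'},
    # mirroring the "progress is N%" / "completed successfully"
    # messages above; this is not oslo.vmware's real return type.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_once()
        state = info.get('state')
        if state == 'success':
            return info                      # "completed successfully"
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TaskTimeoutError("task did not complete within %ss" % timeout)


# Example: a fake task that finishes on the third poll.
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 51},
                {'state': 'success'}])
wait_for_task(lambda: next(_states), interval=0.0)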
[ 846.415180] env[68279]: DEBUG nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.416051] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b7fcb5-66ac-407a-aefb-63f31fe3ea4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.438794] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.645224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.645515] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.645745] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.645960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.646162] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.648222] env[68279]: INFO nova.compute.manager [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 
95f0aeaa-75ab-4fd9-b28d-e43703429167] Terminating instance [ 846.795264] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aa8895-43cb-4564-a4da-7bea1de55543 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.803533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d89bda-27b4-480e-89e2-501348e7d845 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.840038] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee38e592-2f42-41b6-8810-9e1ac7aa7543 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.847999] env[68279]: DEBUG oslo_vmware.api [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963120, 'name': PowerOnVM_Task, 'duration_secs': 0.473708} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.850165] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.850361] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-04d1cf09-42e0-40e5-bea1-16e7a40a02ed tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance '665d932d-1068-4bb2-835c-2184a80753d1' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 846.854879] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a63672-8c61-4925-a484-b8fe8aadd830 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.873112] env[68279]: DEBUG nova.compute.provider_tree [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.934225] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.939028] env[68279]: INFO nova.compute.manager [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Took 53.43 seconds to build instance. 
[ 847.086162] env[68279]: DEBUG nova.network.neutron [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Updating instance_info_cache with network_info: [{"id": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "address": "fa:16:3e:82:ad:18", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7a730dc-6b", "ovs_interfaceid": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.152488] env[68279]: DEBUG nova.compute.manager [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 847.152647] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.153599] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c8ec94-44b9-4b82-940c-ca007b05f32c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.162440] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.162619] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7312ac8-347a-482b-ac02-65e3c77a5c45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.169788] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 847.169788] env[68279]: value = "task-2963121" [ 847.169788] env[68279]: _type = "Task" [ 847.169788] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.173500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "b45f310f-e614-47db-9f6e-f35dd481137c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.173500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.173500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "b45f310f-e614-47db-9f6e-f35dd481137c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.173727] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.174675] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.176128] env[68279]: INFO nova.compute.manager [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Terminating instance [ 847.180330] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963121, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.375994] env[68279]: DEBUG nova.scheduler.client.report [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.441390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ac46b63-92f0-4a3b-9445-15ed9ad6b674 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.360s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.592361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.592361] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Instance network_info: |[{"id": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "address": "fa:16:3e:82:ad:18", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7a730dc-6b", "ovs_interfaceid": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 847.592361] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:ad:18', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7a730dc-6b0a-4b9e-96db-c7f8c611ae16', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.599052] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.599613] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.599901] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7601cae8-73f2-4087-8519-050a664e345b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.619870] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.619870] env[68279]: value = "task-2963122" [ 847.619870] env[68279]: _type = "Task" [ 847.619870] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.628389] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963122, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.682155] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963121, 'name': PowerOffVM_Task, 'duration_secs': 0.205931} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.682895] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "refresh_cache-b45f310f-e614-47db-9f6e-f35dd481137c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.683126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquired lock "refresh_cache-b45f310f-e614-47db-9f6e-f35dd481137c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.683382] env[68279]: DEBUG nova.network.neutron [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.684671] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.684946] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.685384] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b14bb08-e0b3-4d89-b42f-c3c920db618d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.746634] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.746776] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.747014] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore1] 95f0aeaa-75ab-4fd9-b28d-e43703429167 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.747307] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e0b4d47-3ee0-41cc-9ccb-97e3c844effc {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.754639] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 847.754639] env[68279]: value = "task-2963124" [ 847.754639] env[68279]: _type = "Task" [ 847.754639] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.763768] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.881122] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.121s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.883639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.794s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.885302] env[68279]: INFO nova.compute.claims [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.918707] env[68279]: INFO nova.scheduler.client.report [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Deleted allocations for instance ed86ef15-1941-40c5-8178-344a7b401b58 [ 847.928299] env[68279]: DEBUG nova.compute.manager [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Received event network-changed-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.928299] env[68279]: DEBUG nova.compute.manager [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Refreshing instance network info cache due to event network-changed-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.928414] env[68279]: DEBUG oslo_concurrency.lockutils [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] Acquiring lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.928758] env[68279]: DEBUG oslo_concurrency.lockutils [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] Acquired lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.928859] env[68279]: DEBUG nova.network.neutron [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Refreshing network info cache for port c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.945968] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.032196] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "19f693cd-b598-432d-acf5-64da9f640d5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.032385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.130219] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963122, 'name': CreateVM_Task, 'duration_secs': 0.309135} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.130407] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.131136] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.131308] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.131647] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.131908] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99dca4a8-7851-4613-bf9f-85263237d4ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.138287] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 848.138287] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b284e6-166a-b3d6-1824-c6fc6d266909" [ 848.138287] env[68279]: _type = "Task" [ 848.138287] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.145881] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b284e6-166a-b3d6-1824-c6fc6d266909, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.203013] env[68279]: DEBUG nova.network.neutron [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.264876] env[68279]: DEBUG oslo_vmware.api [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150658} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.265812] env[68279]: DEBUG nova.network.neutron [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.268085] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.268085] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.268085] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.268085] env[68279]: INFO nova.compute.manager [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Took 1.12 seconds to destroy the instance on the hypervisor. [ 848.268085] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.268085] env[68279]: DEBUG nova.compute.manager [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.268592] env[68279]: DEBUG nova.network.neutron [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.429728] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0853945c-841c-4824-9d64-5665e449d680 tempest-ServerTagsTestJSON-461736765 tempest-ServerTagsTestJSON-461736765-project-member] Lock "ed86ef15-1941-40c5-8178-344a7b401b58" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.998s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.479670] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.650202] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b284e6-166a-b3d6-1824-c6fc6d266909, 'name': SearchDatastore_Task, 'duration_secs': 0.009252} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.650546] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.650779] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.651165] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.651204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.651397] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.651661] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4869e21a-e82a-4bb5-aa91-98ac36c1aeb5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.664316] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.664504] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.665253] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fe2cd6c-f072-49ed-a0b9-0ebd4a9a300e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.671136] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 848.671136] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eb1c3f-d018-17b0-dcf9-614909166b3d" [ 848.671136] env[68279]: _type = "Task" [ 848.671136] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.681072] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eb1c3f-d018-17b0-dcf9-614909166b3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.701876] env[68279]: DEBUG nova.network.neutron [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Updated VIF entry in instance network info cache for port c7a730dc-6b0a-4b9e-96db-c7f8c611ae16. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.702268] env[68279]: DEBUG nova.network.neutron [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Updating instance_info_cache with network_info: [{"id": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "address": "fa:16:3e:82:ad:18", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7a730dc-6b", "ovs_interfaceid": "c7a730dc-6b0a-4b9e-96db-c7f8c611ae16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.772265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Releasing lock "refresh_cache-b45f310f-e614-47db-9f6e-f35dd481137c" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.774972] env[68279]: DEBUG nova.compute.manager [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.775289] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.776425] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85abb7e-6379-49e8-9c5c-6056e265ab68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.785552] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.786440] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbf77d2e-e0d5-4577-8bc2-6396b8145c4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.794286] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 848.794286] env[68279]: value = "task-2963125" [ 848.794286] env[68279]: _type = "Task" [ 848.794286] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.805319] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.048013] env[68279]: DEBUG nova.network.neutron [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.185708] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52eb1c3f-d018-17b0-dcf9-614909166b3d, 'name': SearchDatastore_Task, 'duration_secs': 0.010245} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.186727] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5c6bc2e-72be-4932-a79f-2b28350bb021 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.195863] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 849.195863] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52657a89-deff-89d1-82c1-0bf40c4e1dc7" [ 849.195863] env[68279]: _type = "Task" [ 849.195863] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.206904] env[68279]: DEBUG oslo_concurrency.lockutils [req-25f8ed9e-9736-462a-869a-43afe6480387 req-ca5eae6a-586c-4574-a1a3-c8f8a4f423ec service nova] Releasing lock "refresh_cache-67466e30-5944-490c-a89b-2d32c59525be" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.207308] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52657a89-deff-89d1-82c1-0bf40c4e1dc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.304283] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963125, 'name': PowerOffVM_Task, 'duration_secs': 0.22124} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.304563] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.304732] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.304986] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd065d69-c461-49ba-ac48-466f14b64507 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.310786] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.311017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.311201] env[68279]: INFO nova.compute.manager [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Shelving [ 849.334983] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.335236] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.335413] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Deleting the datastore file [datastore2] b45f310f-e614-47db-9f6e-f35dd481137c {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.335809] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eee0fe9e-6b9b-41b6-8a09-4a5e7db90a8e 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.346103] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for the task: (returnval){ [ 849.346103] env[68279]: value = "task-2963127" [ 849.346103] env[68279]: _type = "Task" [ 849.346103] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.357860] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.511967] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81727a9c-cf75-444f-871f-3b2b82982517 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.521650] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcea757f-aabf-43d0-92a4-4a00730e4101 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.555342] env[68279]: INFO nova.compute.manager [-] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Took 1.29 seconds to deallocate network for instance. [ 849.557914] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359eef3d-ce3a-4578-9fdd-c8bf6ef6fb02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.569878] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba05f851-72fa-4b08-b47b-81d85685ab69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.586410] env[68279]: DEBUG nova.compute.provider_tree [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.708873] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52657a89-deff-89d1-82c1-0bf40c4e1dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.011056} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.709334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.709719] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 67466e30-5944-490c-a89b-2d32c59525be/67466e30-5944-490c-a89b-2d32c59525be.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.710096] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90a79820-c6c8-4282-85ce-78d3bea4e08c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.719168] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 849.719168] env[68279]: value = "task-2963128" [ 849.719168] env[68279]: _type = "Task" [ 849.719168] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.728372] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.857850] env[68279]: DEBUG oslo_vmware.api [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Task: {'id': task-2963127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134889} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.857850] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.857850] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.857850] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.857850] env[68279]: INFO nova.compute.manager [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Took 1.08 seconds to destroy the instance on the hypervisor. [ 849.858092] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.858204] env[68279]: DEBUG nova.compute.manager [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.858302] env[68279]: DEBUG nova.network.neutron [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.893897] env[68279]: DEBUG nova.network.neutron [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.066770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.117801] env[68279]: ERROR nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [req-75314c92-afb0-410a-ab38-77832d097b4f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-75314c92-afb0-410a-ab38-77832d097b4f"}]} [ 850.143380] env[68279]: DEBUG nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 850.171313] env[68279]: DEBUG nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 850.171534] env[68279]: DEBUG nova.compute.provider_tree [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.187590] env[68279]: DEBUG nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 
tempest-ListImageFiltersTestJSON-670507249-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 850.208447] env[68279]: DEBUG nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 850.231605] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963128, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.239454] env[68279]: DEBUG nova.compute.manager [req-0c81824e-cb93-4b00-9c25-6fa190c74863 req-d67c1dc0-7696-4415-8de2-7bc404f8ddef service nova] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Received event network-vif-deleted-f0460457-e89a-40df-b773-9139c4f14b41 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.324102] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.324102] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6341fbf9-f238-4607-98d0-dee756e3781b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.332954] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 850.332954] env[68279]: value = "task-2963129" [ 850.332954] env[68279]: _type = "Task" [ 850.332954] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.342594] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963129, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.397190] env[68279]: DEBUG nova.network.neutron [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.506892] env[68279]: DEBUG nova.network.neutron [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Port 14d0066b-e387-4f2f-a12a-c40206f0b1d0 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 850.507820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.507820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.507820] env[68279]: DEBUG nova.network.neutron [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.736525] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517489} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.736887] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 67466e30-5944-490c-a89b-2d32c59525be/67466e30-5944-490c-a89b-2d32c59525be.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.737506] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.737843] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33a0a4a3-737b-4a59-b037-773c419be9a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.746033] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 850.746033] env[68279]: value = "task-2963130" [ 850.746033] env[68279]: _type = "Task" [ 850.746033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.756127] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.800659] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912e1425-32f2-433d-bf15-d971165f503c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.809755] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a244941-9f23-4fed-b65e-21a3bc4fa477 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.846961] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f77728-6421-4295-82d0-944ec679d352 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.858897] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024db974-a1c4-4f8e-9ae8-58c9294d41be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.862782] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963129, 'name': PowerOffVM_Task, 'duration_secs': 0.27385} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.863068] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.864233] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20aa1a26-ad2f-4746-b2e2-64346e0250f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.874532] env[68279]: DEBUG nova.compute.provider_tree [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.892484] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43aa1a46-2a34-4656-82d1-601ab17dc9f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.899651] env[68279]: INFO nova.compute.manager [-] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Took 1.04 seconds to deallocate network for instance. [ 851.256314] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070097} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.258709] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.259899] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26018a5c-ba58-4160-be32-c6be8aa31728 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.284052] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 67466e30-5944-490c-a89b-2d32c59525be/67466e30-5944-490c-a89b-2d32c59525be.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.284607] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cb49c80-eb00-4050-84c8-2cc6629f16a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.305911] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 851.305911] env[68279]: value = "task-2963131" [ 851.305911] env[68279]: _type = "Task" [ 851.305911] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.314901] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963131, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.315902] env[68279]: DEBUG nova.network.neutron [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.405350] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 851.405661] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0862dc6d-a2f7-4a9f-bb89-03956a538302 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.410506] env[68279]: DEBUG nova.scheduler.client.report [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 851.410659] env[68279]: DEBUG nova.compute.provider_tree [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 90 to 91 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 851.410844] env[68279]: DEBUG nova.compute.provider_tree 
[None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.415167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.437547] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 851.437547] env[68279]: value = "task-2963132" [ 851.437547] env[68279]: _type = "Task" [ 851.437547] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.448054] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963132, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.824187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.825758] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.917947] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.034s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.918491] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 851.921096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.636s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.922871] env[68279]: INFO nova.compute.claims [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.949823] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963132, 'name': CreateSnapshot_Task, 'duration_secs': 0.500936} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.949823] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 851.950483] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8874afe5-e31b-4c40-94ae-eda29b20d0b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.316539] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963131, 'name': ReconfigVM_Task, 'duration_secs': 0.583402} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.316867] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 67466e30-5944-490c-a89b-2d32c59525be/67466e30-5944-490c-a89b-2d32c59525be.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.317491] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a59b84f0-8e6e-40b9-98db-547c18591c8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.325643] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 852.325643] env[68279]: value = "task-2963133" [ 852.325643] env[68279]: _type = "Task" [ 852.325643] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.329650] env[68279]: DEBUG nova.compute.manager [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 852.329880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.337400] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963133, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.428037] env[68279]: DEBUG nova.compute.utils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.431390] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 852.431554] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.472692] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 852.473017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-64eeda98-f68a-459b-8700-8162349999c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.483302] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 852.483302] env[68279]: value = "task-2963134" [ 852.483302] env[68279]: _type = "Task" [ 852.483302] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.491878] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963134, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.528687] env[68279]: DEBUG nova.policy [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '882367f9bf004b9bb3c4df22e982736b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2876d99458d04c4cad144c0b6c898193', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 852.840877] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963133, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.934551] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.995609] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963134, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.189297] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Successfully created port: 6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.338337] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963133, 'name': Rename_Task, 'duration_secs': 0.655138} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.338717] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.339098] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7d07516-af45-4a73-99f1-4b50ab6e7ef7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.347241] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 853.347241] env[68279]: value = "task-2963135" [ 853.347241] env[68279]: _type = "Task" [ 853.347241] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.359312] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963135, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.495267] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963134, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.739508] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f58079-6277-459d-9fdc-c80d1a532261 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.748617] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac544d5-577a-4253-8ae2-408432978402 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.782401] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ea87ef-d0b8-45b8-b447-542a81280565 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.791018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed0d49a-87a4-4f80-9028-47604464bdd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.804383] env[68279]: DEBUG nova.compute.provider_tree [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.861082] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963135, 'name': 
PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.946460] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.975193] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.975484] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.975670] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.975884] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.976078] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.976356] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.976472] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 853.976650] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.976840] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.977092] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.977280] env[68279]: DEBUG nova.virt.hardware [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.978143] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118fa0a2-3d50-4247-896c-dacfd22b3167 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.989628] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705841e7-9f8c-4773-802b-7936c166173a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.998197] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963134, 'name': CloneVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.308278] env[68279]: DEBUG nova.scheduler.client.report [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.360254] env[68279]: DEBUG oslo_vmware.api [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963135, 'name': PowerOnVM_Task, 'duration_secs': 1.000171} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.360575] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.360736] env[68279]: INFO nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Took 8.82 seconds to spawn the instance on the hypervisor. [ 854.360925] env[68279]: DEBUG nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 854.361705] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e656267c-899d-464b-84da-adb575904dde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.495571] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963134, 'name': CloneVM_Task, 'duration_secs': 1.572744} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.495845] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Created linked-clone VM from snapshot [ 854.496602] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bdfac5-613c-4f21-adc0-20a3b3f47956 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.503700] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Uploading image 8b039f01-d278-40cd-a3c9-5f971fe7f486 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 854.523359] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 854.523359] env[68279]: value = "vm-594619" [ 854.523359] env[68279]: _type = "VirtualMachine" [ 854.523359] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 854.523634] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a5532523-f411-419e-8bb3-b6784f925e02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.530456] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease: (returnval){ [ 854.530456] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca47f-3ad7-991b-7a68-a63c05b0e771" [ 854.530456] env[68279]: _type = "HttpNfcLease" [ 854.530456] env[68279]: } obtained for exporting VM: (result){ [ 854.530456] env[68279]: value = "vm-594619" [ 854.530456] env[68279]: _type = "VirtualMachine" [ 854.530456] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 854.530733] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the lease: (returnval){ [ 854.530733] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca47f-3ad7-991b-7a68-a63c05b0e771" [ 854.530733] env[68279]: _type = "HttpNfcLease" [ 854.530733] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 854.537179] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 854.537179] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca47f-3ad7-991b-7a68-a63c05b0e771" [ 854.537179] env[68279]: _type = "HttpNfcLease" [ 854.537179] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 854.754267] env[68279]: DEBUG nova.compute.manager [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Received event network-vif-plugged-6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.754610] env[68279]: DEBUG oslo_concurrency.lockutils [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] Acquiring lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.754859] env[68279]: DEBUG oslo_concurrency.lockutils [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.755092] env[68279]: DEBUG oslo_concurrency.lockutils [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.755485] env[68279]: DEBUG nova.compute.manager [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] No waiting events found dispatching network-vif-plugged-6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 854.755666] env[68279]: WARNING nova.compute.manager [req-899454ab-f216-443a-807e-59246daa7131 req-56c8d9bc-b2c8-4b5a-ae9e-debafcef2b83 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Received unexpected event network-vif-plugged-6d3ffb2b-c189-49f3-9753-d336f4c7157f for instance with vm_state building and task_state spawning. [ 854.813628] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.892s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.814182] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 854.819393] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.261s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.819393] env[68279]: INFO nova.compute.claims [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.866122] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Successfully updated port: 6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.880866] env[68279]: INFO nova.compute.manager [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Took 58.10 seconds to build instance. [ 855.039521] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.039521] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca47f-3ad7-991b-7a68-a63c05b0e771" [ 855.039521] env[68279]: _type = "HttpNfcLease" [ 855.039521] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 855.039900] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 855.039900] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca47f-3ad7-991b-7a68-a63c05b0e771" [ 855.039900] env[68279]: _type = "HttpNfcLease" [ 855.039900] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 855.041046] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492cb6dc-a234-4a43-98ff-31b042a0f6f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.048478] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 855.048663] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 855.138029] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-54f9cb9b-bbb9-45ad-9448-7ea8915e5194 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.323613] env[68279]: DEBUG nova.compute.utils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 855.326948] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 855.327139] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.368434] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.368723] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.368903] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.381374] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a895d80-aed6-44c4-aa86-a9bf5b6c0386 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 75.263s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.443145] env[68279]: DEBUG nova.policy [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '882367f9bf004b9bb3c4df22e982736b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2876d99458d04c4cad144c0b6c898193', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 855.827917] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 855.884838] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 855.918949] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.101358] env[68279]: DEBUG nova.network.neutron [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Updating instance_info_cache with network_info: [{"id": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "address": "fa:16:3e:81:80:32", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3ffb2b-c1", "ovs_interfaceid": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.107623] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Successfully created port: df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.408546] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.503016] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af470470-7f1a-49a3-b443-fbe354e327fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.510667] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b7258e-3c60-4449-833f-824473aa89f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.543512] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eb2037-97e5-4bac-8a32-dd23e06773a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.551958] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6a2813-5911-44a5-8009-dbcc89d555cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.566140] env[68279]: DEBUG nova.compute.provider_tree [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.605486] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.605486] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Instance network_info: |[{"id": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "address": "fa:16:3e:81:80:32", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3ffb2b-c1", "ovs_interfaceid": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 856.605717] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:80:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d3ffb2b-c189-49f3-9753-d336f4c7157f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.615338] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating folder: Project (2876d99458d04c4cad144c0b6c898193). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.616021] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5e2ab88-9cca-4b88-9ba4-93eeaea01ab0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.626751] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Created folder: Project (2876d99458d04c4cad144c0b6c898193) in parent group-v594445. [ 856.627059] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating folder: Instances. Parent ref: group-v594620. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.627457] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbcd087d-f8b3-4732-a546-4a29cbdafa99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.637587] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Created folder: Instances in parent group-v594620. [ 856.637991] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.638249] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.638562] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5bb32a6-8544-4846-a605-2e67444c0bcd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.658478] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.658478] env[68279]: value = "task-2963139" [ 856.658478] env[68279]: _type = "Task" [ 856.658478] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.667629] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963139, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.801928] env[68279]: DEBUG nova.compute.manager [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Received event network-changed-6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.802224] env[68279]: DEBUG nova.compute.manager [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Refreshing instance network info cache due to event network-changed-6d3ffb2b-c189-49f3-9753-d336f4c7157f. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 856.802552] env[68279]: DEBUG oslo_concurrency.lockutils [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] Acquiring lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.802695] env[68279]: DEBUG oslo_concurrency.lockutils [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] Acquired lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.802939] env[68279]: DEBUG nova.network.neutron [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Refreshing network info cache for port 6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.845543] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 856.871745] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 856.872090] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.872321] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 856.872513] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.872761] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 856.872998] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 856.873325] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 856.873552] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 856.873837] env[68279]: DEBUG 
nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 856.874016] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 856.874205] env[68279]: DEBUG nova.virt.hardware [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 856.875565] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38d9472-fc90-4a2c-b827-f66b657b36e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.884598] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d13053d-e605-4ebe-95fb-12584f001e69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.069870] env[68279]: DEBUG nova.scheduler.client.report [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.171861] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963139, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.257910] env[68279]: DEBUG nova.compute.manager [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 857.258900] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c7793b-9e29-42a8-806a-f827e2c5acc8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.522519] env[68279]: DEBUG nova.network.neutron [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Updated VIF entry in instance network info cache for port 6d3ffb2b-c189-49f3-9753-d336f4c7157f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.522934] env[68279]: DEBUG nova.network.neutron [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Updating instance_info_cache with network_info: [{"id": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "address": "fa:16:3e:81:80:32", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3ffb2b-c1", "ovs_interfaceid": "6d3ffb2b-c189-49f3-9753-d336f4c7157f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.576421] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.758s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.576421] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 857.580741] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.055s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.580741] env[68279]: DEBUG nova.objects.instance [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lazy-loading 'resources' on Instance uuid 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.670891] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963139, 'name': CreateVM_Task, 'duration_secs': 0.678443} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.671227] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.672179] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.672423] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.672901] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 857.673235] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-639fef1f-7546-427a-bb40-aef97d868db1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.679704] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 857.679704] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b5d6ed-44dd-f793-fafc-a2148741b84d" [ 857.679704] env[68279]: _type = "Task" [ 857.679704] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.689927] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b5d6ed-44dd-f793-fafc-a2148741b84d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.770187] env[68279]: INFO nova.compute.manager [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] instance snapshotting [ 857.773457] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495a5883-d610-4273-ac5d-b429a810dde1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.792845] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a88d679-158c-44cc-b5e0-96a91337b9ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.990326] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Successfully updated port: df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.026847] env[68279]: DEBUG oslo_concurrency.lockutils [req-3391322e-fdaa-45b0-bdc1-788a6cf4876f req-39805949-e412-41d7-8ffb-20bb0d024165 service nova] Releasing lock "refresh_cache-97c3000f-a3d8-45c1-b0a4-12eb2b22b572" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.085400] env[68279]: DEBUG nova.compute.utils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.091670] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 858.091670] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 858.172794] env[68279]: DEBUG nova.policy [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f2a76467ed144c4af3094cac933f37e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a00666a2d2b04f789b1066eeae5a8a64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.193349] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b5d6ed-44dd-f793-fafc-a2148741b84d, 'name': SearchDatastore_Task, 'duration_secs': 0.01125} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.193663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.193919] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.194174] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.194317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.194587] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 
tempest-ListImageFiltersTestJSON-670507249-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.194936] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0344c2f-dac9-4481-baa2-184e658bd198 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.204986] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.204986] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.205801] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6e58936-2063-4731-93f7-9cadf982a163 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.212302] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 858.212302] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ee5c93-8746-eb2d-6654-8a7833191e65" [ 858.212302] env[68279]: _type = "Task" [ 858.212302] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.223148] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ee5c93-8746-eb2d-6654-8a7833191e65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.304483] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 858.304791] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-aeca5bab-b642-4f4e-b64b-2e9c9d4f8d56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.311704] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 858.311704] env[68279]: value = "task-2963140" [ 858.311704] env[68279]: _type = "Task" [ 858.311704] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.322605] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963140, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.471993] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Successfully created port: 7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.496099] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.496099] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.496099] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.589273] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 858.679482] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f102947-616a-4caf-a922-e7d7595f02fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.687563] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8964f5-fed5-4cd0-a611-5d4a1f938416 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.721567] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b893c99a-dd2c-44e9-b354-ab0e2ad4ad68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.729777] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ee5c93-8746-eb2d-6654-8a7833191e65, 'name': SearchDatastore_Task, 'duration_secs': 0.016232} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.732368] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ed4cce0-f049-4630-817d-984633b27ce4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.735464] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45750bb1-06d9-4448-8523-7f059dde584e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.742166] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 858.742166] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d8890-78d9-2d0e-d3e8-285d07abdf39" [ 858.742166] env[68279]: _type = "Task" [ 858.742166] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.750151] env[68279]: DEBUG nova.compute.provider_tree [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.761027] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d8890-78d9-2d0e-d3e8-285d07abdf39, 'name': SearchDatastore_Task, 'duration_secs': 0.011678} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.761561] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.761875] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 97c3000f-a3d8-45c1-b0a4-12eb2b22b572/97c3000f-a3d8-45c1-b0a4-12eb2b22b572.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.762531] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4866545-c389-4d57-82ef-8073c32117e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.769605] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 858.769605] env[68279]: value = "task-2963141" [ 858.769605] env[68279]: _type = "Task" [ 858.769605] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.777848] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.823745] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963140, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.950329] env[68279]: DEBUG nova.compute.manager [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Received event network-vif-plugged-df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.950472] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Acquiring lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.950752] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.950855] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.951044] env[68279]: DEBUG nova.compute.manager [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] No waiting events found dispatching network-vif-plugged-df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 858.951241] env[68279]: WARNING nova.compute.manager [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Received unexpected event network-vif-plugged-df5903a9-63d8-44d6-8066-6790510ea180 for instance with vm_state building and task_state spawning. [ 858.951420] env[68279]: DEBUG nova.compute.manager [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Received event network-changed-df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.951636] env[68279]: DEBUG nova.compute.manager [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Refreshing instance network info cache due to event network-changed-df5903a9-63d8-44d6-8066-6790510ea180. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 858.951739] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Acquiring lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.034853] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.186314] env[68279]: DEBUG nova.network.neutron [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Updating instance_info_cache with network_info: [{"id": "df5903a9-63d8-44d6-8066-6790510ea180", "address": "fa:16:3e:34:49:2b", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf5903a9-63", "ovs_interfaceid": "df5903a9-63d8-44d6-8066-6790510ea180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.256931] env[68279]: DEBUG nova.scheduler.client.report [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.281619] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963141, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.324444] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963140, 'name': CreateSnapshot_Task, 'duration_secs': 0.78709} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.324740] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 859.325592] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0038c8-0cc1-4a73-9bcd-12778279ce11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.600691] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 859.622301] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 859.622554] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.622712] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.622908] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
859.623079] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.623234] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 859.623437] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 859.623595] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 859.623759] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 859.623919] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 859.624141] env[68279]: DEBUG nova.virt.hardware [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 859.625386] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aba856b-dc57-401e-9110-3a41deadf799 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.633529] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705304e3-32fa-42c4-9035-b6d54aaf1e17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.690991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.691357] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 
tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Instance network_info: |[{"id": "df5903a9-63d8-44d6-8066-6790510ea180", "address": "fa:16:3e:34:49:2b", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf5903a9-63", "ovs_interfaceid": "df5903a9-63d8-44d6-8066-6790510ea180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.691715] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Acquired lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.691948] env[68279]: DEBUG nova.network.neutron [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Refreshing network info cache for port df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.693202] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:49:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df5903a9-63d8-44d6-8066-6790510ea180', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.701023] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.701534] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.702422] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12733c19-f6e8-41f6-8dd7-2411e2e0e3b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.724685] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.724685] env[68279]: value = "task-2963142" [ 859.724685] env[68279]: _type = "Task" [ 859.724685] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.733404] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963142, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.765777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.769244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.475s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.769932] env[68279]: INFO nova.compute.claims [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.781547] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575038} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.781815] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 97c3000f-a3d8-45c1-b0a4-12eb2b22b572/97c3000f-a3d8-45c1-b0a4-12eb2b22b572.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.782060] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.782360] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aaacc59-d040-4c11-9a25-533f88174a4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.788819] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 859.788819] env[68279]: value = "task-2963143" [ 859.788819] env[68279]: _type = "Task" [ 859.788819] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.794780] env[68279]: INFO nova.scheduler.client.report [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleted allocations for instance 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7 [ 859.802509] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.844172] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 859.844551] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-879c372b-c1de-42c2-9d84-a424b706b4a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.854257] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 859.854257] env[68279]: value = "task-2963144" [ 859.854257] env[68279]: _type = "Task" [ 859.854257] env[68279]: } to complete. 
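Each "Waiting for the task: (returnval){ value = "task-29631xx" ... } to complete" / "progress is N%" pair above is oslo.vmware polling a vCenter Task managed object (wait_for_task/_poll_task in oslo_vmware/api.py) until it reports success or error. A generic sketch of that polling pattern follows, independent of the vSphere SOAP bindings; `poll_progress` is a caller-supplied stand-in for the real task-state query.

```python
import time

def wait_for_task(poll_progress, poll_interval=0.5):
    """Generic version of the loop behind the "progress is N%" log lines.

    `poll_progress()` must return a dict such as
    {"state": "running" | "success" | "error", "progress": int, "result": ...}.
    The real code queries the Task managed object's `info` property instead.
    """
    while True:
        info = poll_progress()
        if info["state"] == "success":
            return info.get("result")        # e.g. the created VM reference
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)

# Example with a fake two-step task:
states = iter([{"state": "running", "progress": 0},
               {"state": "success", "result": "vm-594625"}])
print(wait_for_task(lambda: next(states)))
```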
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.863156] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963144, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.236831] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963142, 'name': CreateVM_Task, 'duration_secs': 0.38605} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.239452] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.240246] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.240504] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.240729] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 860.241316] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b0f509a-bb6c-4ba3-b591-30a8967416c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.246557] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 860.246557] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe6321-61e8-8d9b-e63f-d1ed17b18c27" [ 860.246557] env[68279]: _type = "Task" [ 860.246557] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.255268] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe6321-61e8-8d9b-e63f-d1ed17b18c27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.301626] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095822} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.304129] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.308051] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859d417d-5853-4353-82bb-ad4748e1a1d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.310292] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c37506c3-60de-4f92-8f19-d97908a0010c tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "65c3761e-c236-41a9-9adb-d1a6e7a9a7c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.776s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.335790] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 97c3000f-a3d8-45c1-b0a4-12eb2b22b572/97c3000f-a3d8-45c1-b0a4-12eb2b22b572.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.339879] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f12692a-1788-47f2-8557-a9a278a69cc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.372860] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963144, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.375018] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 860.375018] env[68279]: value = "task-2963145" [ 860.375018] env[68279]: _type = "Task" [ 860.375018] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.382839] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963145, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.422872] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Successfully updated port: 7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.463982] env[68279]: DEBUG nova.network.neutron [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Updated VIF entry in instance network info cache for port df5903a9-63d8-44d6-8066-6790510ea180. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.464458] env[68279]: DEBUG nova.network.neutron [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Updating instance_info_cache with network_info: [{"id": "df5903a9-63d8-44d6-8066-6790510ea180", "address": "fa:16:3e:34:49:2b", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf5903a9-63", "ovs_interfaceid": "df5903a9-63d8-44d6-8066-6790510ea180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.757527] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe6321-61e8-8d9b-e63f-d1ed17b18c27, 'name': SearchDatastore_Task, 'duration_secs': 0.016341} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.757911] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.758088] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.758333] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.758504] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.758739] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.759069] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-944fadab-519c-4ed8-8407-5f0004273448 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.768623] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.768910] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Folder [datastore1] devstack-image-cache_base created. 
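The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around "[datastore1] devstack-image-cache_base/01e502b7-..." come from oslo.concurrency named locks serializing work on the per-image cache entry while it is checked (SearchDatastore_Task) and populated (CopyVirtualDisk_Task). A minimal sketch of that locking pattern with oslo_concurrency.lockutils; the function and its body are illustrative, not the vmops code.

```python
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id):
    # One named lock per cache entry, mirroring the
    # "[datastore1] devstack-image-cache_base/<image-id>" locks in the log.
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(lock_name):
        # Inside the lock: search the datastore for the cached VMDK and,
        # if it is missing, copy the image into the cache directory.
        pass

ensure_cached_image("datastore1", "01e502b7-2447-4972-9fe7-fd69f76ef71f")
```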
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 860.769656] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c216cc-33a1-449e-a4e0-2d21089a5696 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.775172] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 860.775172] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52714f70-8dc1-29e1-eb18-03d661b698ab" [ 860.775172] env[68279]: _type = "Task" [ 860.775172] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.787276] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52714f70-8dc1-29e1-eb18-03d661b698ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.864554] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963144, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.884658] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963145, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.925840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.925994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.926440] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.967203] env[68279]: DEBUG oslo_concurrency.lockutils [req-8d5fcd0e-87ff-4d3d-b22c-928c9c08362c req-c43aae2a-c2f8-4ec4-b86e-c8eab4377d48 service nova] Releasing lock "refresh_cache-1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.979917] env[68279]: DEBUG nova.compute.manager [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Received event network-vif-plugged-7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.980213] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Acquiring lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.980385] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.980578] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.980701] env[68279]: DEBUG nova.compute.manager [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] No waiting events found dispatching network-vif-plugged-7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 860.980864] 
env[68279]: WARNING nova.compute.manager [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Received unexpected event network-vif-plugged-7bd78f37-cfff-4fc2-ae4b-525b7f226259 for instance with vm_state building and task_state spawning. [ 860.981045] env[68279]: DEBUG nova.compute.manager [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Received event network-changed-7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.981208] env[68279]: DEBUG nova.compute.manager [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Refreshing instance network info cache due to event network-changed-7bd78f37-cfff-4fc2-ae4b-525b7f226259. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 860.981367] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Acquiring lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.287157] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52714f70-8dc1-29e1-eb18-03d661b698ab, 'name': SearchDatastore_Task, 'duration_secs': 0.01198} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.290229] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28cf3daf-610c-472d-b5de-891098c1dd0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.295805] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 861.295805] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d140ea-14b5-4c0f-552a-13c643c13eae" [ 861.295805] env[68279]: _type = "Task" [ 861.295805] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.303920] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d140ea-14b5-4c0f-552a-13c643c13eae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.305658] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7963de67-d14c-443a-9949-be8d8f5027a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.312628] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0cb375-5fd6-447a-ae1e-40c73bc8ef5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.345066] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c7c047-c69a-4db6-a52b-732c61d9c709 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.354465] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed1d704-2b09-443e-b1cb-138c80e5416a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.373813] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963144, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.392740] env[68279]: DEBUG nova.compute.provider_tree [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.414690] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963145, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.467681] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.606725] env[68279]: DEBUG nova.network.neutron [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Updating instance_info_cache with network_info: [{"id": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "address": "fa:16:3e:e2:72:c4", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd78f37-cf", "ovs_interfaceid": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.814317] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d140ea-14b5-4c0f-552a-13c643c13eae, 'name': SearchDatastore_Task, 'duration_secs': 0.012509} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.814556] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.816024] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4/1ae768c9-3e20-4dee-bdd3-35d7c7d878e4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 861.816024] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4744474-a6b2-45b2-829f-18033a158a45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.821950] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 861.821950] env[68279]: value = "task-2963146" [ 861.821950] env[68279]: _type = "Task" [ 861.821950] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.830728] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963146, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.869897] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963144, 'name': CloneVM_Task, 'duration_secs': 1.672035} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.870240] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Created linked-clone VM from snapshot [ 861.871140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c96ae11-a912-467d-a41e-2610b7c0aad5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.878846] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Uploading image cfebc35c-c617-4fc2-9a37-bdc659062679 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 861.899082] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963145, 'name': ReconfigVM_Task, 'duration_secs': 1.315531} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.901336] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 861.901336] env[68279]: value = "vm-594625" [ 861.901336] env[68279]: _type = "VirtualMachine" [ 861.901336] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 861.902061] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 97c3000f-a3d8-45c1-b0a4-12eb2b22b572/97c3000f-a3d8-45c1-b0a4-12eb2b22b572.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.902316] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e2786a62-fd60-47fa-a99b-6743d8f9ee86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.904202] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2fac305-3a72-465e-b366-d98c98adfe7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.906388] env[68279]: DEBUG nova.scheduler.client.report [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 861.915449] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 861.915449] env[68279]: value = "task-2963148" [ 861.915449] env[68279]: _type = "Task" [ 861.915449] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.916479] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease: (returnval){ [ 861.916479] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b15e6a-58b4-7cae-f3cf-f047180e2506" [ 861.916479] env[68279]: _type = "HttpNfcLease" [ 861.916479] env[68279]: } obtained for exporting VM: (result){ [ 861.916479] env[68279]: value = "vm-594625" [ 861.916479] env[68279]: _type = "VirtualMachine" [ 861.916479] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 861.916685] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the lease: (returnval){ [ 861.916685] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b15e6a-58b4-7cae-f3cf-f047180e2506" [ 861.916685] env[68279]: _type = "HttpNfcLease" [ 861.916685] env[68279]: } to be ready. 
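The "Inventory has not changed for provider 40ba16cf-..." line above carries the Placement inventory that the compute_resources claims are checked against. Usable capacity per resource class is essentially (total - reserved) * allocation_ratio; the small worked example below computes it for the logged values.

```python
# Inventory exactly as logged for provider 40ba16cf-8244-4715-b8c1-975029462ee4.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- the headroom each
# instance_claim above is evaluated against (per-allocation limits such as
# max_unit still apply on top of this).
```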
{{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 861.931966] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963148, 'name': Rename_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.933677] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 861.933677] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b15e6a-58b4-7cae-f3cf-f047180e2506" [ 861.933677] env[68279]: _type = "HttpNfcLease" [ 861.933677] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 861.933969] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 861.933969] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b15e6a-58b4-7cae-f3cf-f047180e2506" [ 861.933969] env[68279]: _type = "HttpNfcLease" [ 861.933969] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 861.934738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e038f985-b01b-4270-9c37-8daade1091eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.943940] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 861.944196] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk for reading. 
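The snapshot upload above follows the HttpNfcLease pattern: request an export lease for the cloned VM, wait until it is ready, pull the disk-0.vmdk URL out of the lease info, stream it over HTTPS while periodically reporting HttpNfcLeaseProgress, and finally complete the lease (or abort it on an incomplete transfer, as happens further down for another instance). The outline below is purely illustrative; `lease` is a duck-typed stand-in for the vSphere lease object and `open_url` for the NFC HTTPS connection, neither is an oslo.vmware API.

```python
import time

def export_vmdk(lease, open_url, chunk_size=1 << 20):
    """Illustrative outline of the lease-driven VMDK export seen in the log."""
    while not lease.ready():                 # "Waiting for the lease ... to be ready."
        time.sleep(0.5)
    url = lease.vmdk_url                     # "Found VMDK URL: https://esx.../disk-0.vmdk"
    transferred = 0
    try:
        with open_url(url) as resp:          # "Opening URL ... for reading."
            while True:
                chunk = resp.read(chunk_size)
                if not chunk:
                    break
                transferred += len(chunk)
                yield chunk
                lease.progress(min(99, transferred * 100 // lease.size))  # keep-alive
        lease.complete()                     # export finished cleanly
    except Exception:
        lease.abort()                        # "Aborting lease ... due to incomplete transfer."
        raise
```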
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 862.054060] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4870afa6-e289-4ce5-b41c-4b64472c48a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.110124] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.110284] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Instance network_info: |[{"id": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "address": "fa:16:3e:e2:72:c4", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd78f37-cf", "ovs_interfaceid": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 862.110516] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Acquired lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.110696] env[68279]: DEBUG nova.network.neutron [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Refreshing network info cache for port 7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.112083] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:72:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7bd78f37-cfff-4fc2-ae4b-525b7f226259', 'vif_model': 
'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.120462] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 862.121023] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 862.122052] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdc1669c-5608-4d38-875a-50c642cbbc38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.144189] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.144189] env[68279]: value = "task-2963149" [ 862.144189] env[68279]: _type = "Task" [ 862.144189] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.154323] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963149, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.337906] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963146, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.411716] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.412380] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Start building networks asynchronously for instance. 
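The "Instance VIF info [...]" entries above are the driver's reduction of a Neutron port from network_info to the few fields the VMware layer needs: the integration bridge name, the MAC, an OpaqueNetwork reference to the NSX logical switch, the Neutron port UUID, and the vmxnet3 model. A small sketch of that mapping for one port dict shaped like the logged network_info (illustrative, not the exact build_virtual_machine code):

```python
def vif_info_from_port(vif):
    """Map one logged network_info entry to the VIF-info dict shown above."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],          # "br-int"
        "mac_address": vif["address"],                      # "fa:16:3e:e2:72:c4"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                              # Neutron port UUID
        "vif_model": "vmxnet3",
    }
```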
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.415201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.971s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.416941] env[68279]: INFO nova.compute.claims [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.429555] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963148, 'name': Rename_Task, 'duration_secs': 0.144685} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.430085] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.430391] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-849b50ac-2678-4baf-9921-996048f20c1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.438140] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 862.438140] env[68279]: value = "task-2963150" [ 862.438140] env[68279]: _type = "Task" [ 862.438140] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.447532] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.653927] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963149, 'name': CreateVM_Task, 'duration_secs': 0.432374} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.654914] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.655235] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.655335] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.655805] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.656129] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0928314e-f56f-4acc-864e-615e8cd97f25 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.663857] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 862.663857] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52495c40-f322-d922-d2dd-39dafbc7ddd5" [ 862.663857] env[68279]: _type = "Task" [ 862.663857] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.672856] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52495c40-f322-d922-d2dd-39dafbc7ddd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.836165] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963146, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559367} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.836648] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4/1ae768c9-3e20-4dee-bdd3-35d7c7d878e4.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 862.836965] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 862.837279] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1036ac3-2c2c-40e8-8e22-64aa72c909f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.844523] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 862.844523] env[68279]: value = "task-2963151" [ 862.844523] env[68279]: _type = "Task" [ 862.844523] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.856142] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963151, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.870743] env[68279]: DEBUG nova.network.neutron [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Updated VIF entry in instance network info cache for port 7bd78f37-cfff-4fc2-ae4b-525b7f226259. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.871338] env[68279]: DEBUG nova.network.neutron [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Updating instance_info_cache with network_info: [{"id": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "address": "fa:16:3e:e2:72:c4", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd78f37-cf", "ovs_interfaceid": "7bd78f37-cfff-4fc2-ae4b-525b7f226259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.925586] env[68279]: DEBUG nova.compute.utils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 862.928519] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 862.929192] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.952418] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963150, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.974139] env[68279]: DEBUG nova.policy [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e41b38c635fb4229bb779e69a0e23cc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd06c0b786e2546b085d914e7d010de6d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.126934] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 863.127914] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5adecc-5e12-4ed6-a9c0-65ad465c4417 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.135232] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 863.135594] env[68279]: ERROR oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk due to incomplete transfer. [ 863.135915] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3a2b6880-c6a5-4553-9a2e-7544884a912c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.143213] env[68279]: DEBUG oslo_vmware.rw_handles [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cb3bec-90b2-eba8-d7f9-a4b7bc747f13/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 863.143433] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Uploaded image 8b039f01-d278-40cd-a3c9-5f971fe7f486 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 863.146061] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 863.146389] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4850efae-e12f-454f-a8bc-dba863dd334c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.152970] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 863.152970] env[68279]: value = "task-2963152" [ 863.152970] env[68279]: _type = "Task" [ 863.152970] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.160831] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963152, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.172622] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52495c40-f322-d922-d2dd-39dafbc7ddd5, 'name': SearchDatastore_Task, 'duration_secs': 0.012675} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.173115] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.173396] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.173702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.173954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.174107] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.174433] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84314050-cd1b-4127-b554-d9071ce3a3e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.183440] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.183744] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.184598] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6953797-6693-415d-acd0-b7c5cccf82b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.190218] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 863.190218] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52916bb6-6ebd-2ebf-b980-7980808e9f82" [ 863.190218] env[68279]: _type = "Task" [ 863.190218] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.198658] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52916bb6-6ebd-2ebf-b980-7980808e9f82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.271104] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Successfully created port: 5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.360365] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077707} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.360734] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.362225] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35618e7-46d6-4312-979d-a24d1bb6bca2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.377045] env[68279]: DEBUG oslo_concurrency.lockutils [req-9459dead-cdec-4649-9b89-16e4b54e3c69 req-996cada4-e3e2-40bf-b9e2-a6e0ead57752 service nova] Releasing lock "refresh_cache-777eda1c-ca3f-4db0-b6b9-5901de5781ff" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.386281] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4/1ae768c9-3e20-4dee-bdd3-35d7c7d878e4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.386660] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d783dd2-7f10-40b6-b226-6fa82c5c7f78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.406865] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 863.406865] env[68279]: value = "task-2963153" [ 863.406865] env[68279]: _type = "Task" [ 863.406865] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.419550] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963153, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.429453] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.448879] env[68279]: DEBUG oslo_vmware.api [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963150, 'name': PowerOnVM_Task, 'duration_secs': 0.541111} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.449203] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.449386] env[68279]: INFO nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 9.50 seconds to spawn the instance on the hypervisor. [ 863.449659] env[68279]: DEBUG nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 863.450438] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0551716f-9509-4d9c-b32e-86557b201e16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.664267] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963152, 'name': Destroy_Task, 'duration_secs': 0.380108} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.667198] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Destroyed the VM [ 863.667813] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 863.667950] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef7268d9-6630-4249-9864-7534d1295e03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.675994] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 863.675994] env[68279]: value = "task-2963154" [ 863.675994] env[68279]: _type = "Task" [ 863.675994] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.687926] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963154, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.704861] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52916bb6-6ebd-2ebf-b980-7980808e9f82, 'name': SearchDatastore_Task, 'duration_secs': 0.01387} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.705686] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de12b21d-8e76-4b73-b76d-405806ece818 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.711717] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 863.711717] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf3c70-de89-0a5f-c86a-2dde52d2062c" [ 863.711717] env[68279]: _type = "Task" [ 863.711717] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.723656] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf3c70-de89-0a5f-c86a-2dde52d2062c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.921210] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.971395] env[68279]: INFO nova.compute.manager [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 62.90 seconds to build instance. 
[ 864.032551] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89961aaf-724a-4fc3-a999-8f2ff89725c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.040212] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94654e56-c4b6-4ed5-9b1e-7b8a8180af4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.073490] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc0f6f5-5502-4ef1-a190-4af52ddb8bfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.082592] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568434c4-d551-43e2-b342-eb189697e622 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.101679] env[68279]: DEBUG nova.compute.provider_tree [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.186588] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963154, 'name': RemoveSnapshot_Task, 'duration_secs': 0.44063} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.186917] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 864.187287] env[68279]: DEBUG nova.compute.manager [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 864.188148] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8cfad3-db07-4c7a-9d19-6c791a4b2d32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.225055] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bf3c70-de89-0a5f-c86a-2dde52d2062c, 'name': SearchDatastore_Task, 'duration_secs': 0.011646} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.225338] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.226032] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 777eda1c-ca3f-4db0-b6b9-5901de5781ff/777eda1c-ca3f-4db0-b6b9-5901de5781ff.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.226032] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8460dc8d-f361-4d13-8d42-afd879a9058c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.235188] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 864.235188] env[68279]: value = "task-2963155" [ 864.235188] env[68279]: _type = "Task" [ 864.235188] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.243118] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.418268] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963153, 'name': ReconfigVM_Task, 'duration_secs': 0.59533} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.418603] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4/1ae768c9-3e20-4dee-bdd3-35d7c7d878e4.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.419396] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f39f63ca-f194-4bc5-b9c9-a5d6d039770a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.426065] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 864.426065] env[68279]: value = "task-2963156" [ 864.426065] env[68279]: _type = "Task" [ 864.426065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.439391] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963156, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.445042] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.475341] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.475606] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.475822] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.476105] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.476272] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.476425] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 864.476633] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 864.476792] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 864.477094] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.477177] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.477360] env[68279]: DEBUG nova.virt.hardware [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.477890] env[68279]: DEBUG oslo_concurrency.lockutils [None req-393ea229-571e-459e-aff9-eeea79eca125 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.289s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.478734] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dd78cb-fb51-4238-81b7-059749174b9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.488818] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd80acf-f518-4902-a6d8-b0d0dcfcaeb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.607025] env[68279]: DEBUG nova.scheduler.client.report [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.701831] env[68279]: INFO nova.compute.manager [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Shelve offloading [ 864.747103] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963155, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.792282] env[68279]: DEBUG nova.compute.manager [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Received event network-vif-plugged-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 864.792282] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] Acquiring lock "8aa8c866-4807-4a06-904e-53c149047d65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.792282] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] Lock "8aa8c866-4807-4a06-904e-53c149047d65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.792282] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] Lock "8aa8c866-4807-4a06-904e-53c149047d65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.792930] env[68279]: DEBUG nova.compute.manager [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] No waiting events found dispatching network-vif-plugged-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 864.793262] env[68279]: WARNING nova.compute.manager [req-4d2a74a3-2961-44ef-a195-1281a19333fa req-8b4993eb-1789-446b-b8ee-8af990860ed2 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Received unexpected event network-vif-plugged-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f for instance with vm_state building and task_state spawning. [ 864.832249] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Successfully updated port: 5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.935889] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963156, 'name': Rename_Task, 'duration_secs': 0.270678} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.936212] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.936463] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdf90100-1fda-4da7-a31a-948db284f8db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.943692] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 864.943692] env[68279]: value = "task-2963157" [ 864.943692] env[68279]: _type = "Task" [ 864.943692] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.951641] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963157, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.986054] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 865.112014] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.112674] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.116385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.099s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.116534] env[68279]: DEBUG nova.objects.instance [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lazy-loading 'resources' on Instance uuid e5565f0d-ed60-4ac8-bba1-ab46b337dd90 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.207154] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.207530] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dfcd28f-5420-4fb5-ac05-3e41dc13df99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.215023] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 865.215023] env[68279]: value = "task-2963158" [ 865.215023] env[68279]: _type = "Task" [ 865.215023] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.224500] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 865.224747] env[68279]: DEBUG nova.compute.manager [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.225579] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05e4ce9-4bbf-4efb-b125-9fd8b8e14440 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.231654] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.231823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.232032] env[68279]: DEBUG nova.network.neutron [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.244413] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540524} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.245434] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 777eda1c-ca3f-4db0-b6b9-5901de5781ff/777eda1c-ca3f-4db0-b6b9-5901de5781ff.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.245753] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.245983] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f24cc5ca-9b88-4ba9-a272-d6ccb4d4d5b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.253226] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 865.253226] env[68279]: value = "task-2963159" [ 865.253226] env[68279]: _type = "Task" [ 865.253226] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.262919] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963159, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.334980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.335207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquired lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.335320] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.455534] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963157, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.509474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.623199] env[68279]: DEBUG nova.compute.utils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.626430] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 865.626430] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.677390] env[68279]: DEBUG nova.policy [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22a4cadc191d4e9fa023eff168c8ddf9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e382d6dee334cd2bcf097cbe56f1143', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.765744] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123342} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.768682] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.770114] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddd2c06-2a71-4cf9-af76-f3c36a06e4cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.794649] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 777eda1c-ca3f-4db0-b6b9-5901de5781ff/777eda1c-ca3f-4db0-b6b9-5901de5781ff.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.797647] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9bfe84d-094b-4923-91bd-52d5c845c9a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.821615] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 865.821615] env[68279]: value = "task-2963160" [ 865.821615] env[68279]: _type = "Task" [ 865.821615] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.834947] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.888803] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.954061] env[68279]: DEBUG oslo_vmware.api [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963157, 'name': PowerOnVM_Task, 'duration_secs': 0.685822} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.954414] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.954556] env[68279]: INFO nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 865.954733] env[68279]: DEBUG nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.955565] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f01d67-7172-4d82-b302-7067ba3c679b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.101134] env[68279]: DEBUG nova.network.neutron [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Updating instance_info_cache with network_info: [{"id": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "address": "fa:16:3e:cf:70:f7", "network": {"id": "c598ce9b-f69d-42af-965f-a4722b810c0e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-713894145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d06c0b786e2546b085d914e7d010de6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5827d8c9-58", "ovs_interfaceid": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.119388] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Successfully created port: 20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.129590] env[68279]: DEBUG nova.compute.utils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 866.203562] env[68279]: DEBUG nova.network.neutron [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.298899] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cff1ff-b711-4209-a631-0a6d044f12a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.308222] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c912a8-c44e-42ad-9d59-7b48027105f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.344433] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee2b908-2407-49f8-90ed-02a725796b68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.353472] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963160, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.356560] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff16808-0f7c-4725-a55a-bdfe34fe8cc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.372849] env[68279]: DEBUG nova.compute.provider_tree [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.480436] env[68279]: INFO nova.compute.manager [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Took 61.21 seconds to build instance. 
[ 866.604725] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Releasing lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.605118] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Instance network_info: |[{"id": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "address": "fa:16:3e:cf:70:f7", "network": {"id": "c598ce9b-f69d-42af-965f-a4722b810c0e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-713894145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d06c0b786e2546b085d914e7d010de6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5827d8c9-58", "ovs_interfaceid": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 866.605585] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:70:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5827d8c9-58c9-41f7-b9aa-e5d2ca91382f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.615023] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Creating folder: Project (d06c0b786e2546b085d914e7d010de6d). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.615023] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e85c67-360b-4749-9266-2b62e12e792a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.629342] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Created folder: Project (d06c0b786e2546b085d914e7d010de6d) in parent group-v594445. [ 866.629549] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Creating folder: Instances. Parent ref: group-v594627. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.629803] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfabdb27-0e2a-4abc-98f2-caff4b7b55c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.631763] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.643444] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Created folder: Instances in parent group-v594627. [ 866.643961] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.643961] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.644132] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-931ab68b-6006-4feb-930c-d4683a0fabab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.664808] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.664808] env[68279]: value = "task-2963163" [ 866.664808] env[68279]: _type = "Task" [ 866.664808] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.673718] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963163, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.706865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.855825] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963160, 'name': ReconfigVM_Task, 'duration_secs': 0.608014} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.856167] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 777eda1c-ca3f-4db0-b6b9-5901de5781ff/777eda1c-ca3f-4db0-b6b9-5901de5781ff.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.856964] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34269d31-73b8-4081-a0f4-0b986a8286b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.863829] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 866.863829] env[68279]: value = "task-2963164" [ 866.863829] env[68279]: _type = "Task" [ 866.863829] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.872215] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963164, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.875727] env[68279]: DEBUG nova.compute.manager [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Received event network-changed-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.876018] env[68279]: DEBUG nova.compute.manager [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Refreshing instance network info cache due to event network-changed-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 866.876883] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] Acquiring lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.877088] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] Acquired lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.877238] env[68279]: DEBUG nova.network.neutron [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Refreshing network info cache for port 5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.879643] env[68279]: DEBUG nova.scheduler.client.report [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.982553] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81ba2696-d33b-4ddd-ac9e-ee991f5c1cc1 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.051s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.064627] env[68279]: DEBUG nova.compute.manager [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-vif-unplugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.064853] env[68279]: DEBUG oslo_concurrency.lockutils [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.065080] env[68279]: DEBUG oslo_concurrency.lockutils [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.065255] env[68279]: DEBUG 
oslo_concurrency.lockutils [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.065429] env[68279]: DEBUG nova.compute.manager [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] No waiting events found dispatching network-vif-unplugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.065617] env[68279]: WARNING nova.compute.manager [req-8ccdeabf-7ed4-483b-9241-1de153fd2429 req-2b026487-9ba4-496f-869a-8feb113ac90d service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received unexpected event network-vif-unplugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 for instance with vm_state shelved and task_state shelving_offloading. [ 867.175542] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963163, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.374616] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963164, 'name': Rename_Task, 'duration_secs': 0.252026} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.374616] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.374813] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87b7851b-6822-4fca-8180-7d7d37d60522 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.383300] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 867.383300] env[68279]: value = "task-2963165" [ 867.383300] env[68279]: _type = "Task" [ 867.383300] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.387173] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.391436] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 43.180s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.391621] env[68279]: DEBUG nova.objects.instance [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 867.408255] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963165, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.422733] env[68279]: INFO nova.scheduler.client.report [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Deleted allocations for instance e5565f0d-ed60-4ac8-bba1-ab46b337dd90 [ 867.487187] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.517289] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.519152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cde0f2-3f98-48b6-b2cf-e7bd28ad8514 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.526336] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.526627] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8cb4ca9-d756-4e71-bd35-72c9c8f49801 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.592715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 867.592945] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 867.593147] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore2] eccc5882-2c8b-456d-bbd2-d9ed22777a77 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.593426] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c470c91-8231-41c7-a7dd-15d70c12470b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.604270] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 867.604270] env[68279]: value = "task-2963167" [ 867.604270] env[68279]: _type = "Task" [ 867.604270] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.614291] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963167, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.644574] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 867.668589] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:43:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1015773663',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1666082034',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.668733] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.668841] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.669100] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.669253] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.669398] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.669601] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 
tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.669757] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.669946] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.670153] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.670334] env[68279]: DEBUG nova.virt.hardware [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.674486] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac602f4-4165-4818-9685-071a9c6bfc38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.686381] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963163, 'name': CreateVM_Task, 'duration_secs': 0.600104} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.686623] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.688590] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ba47a2-5470-46cb-8d30-779b92af84cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.693289] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.693449] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.693778] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.694056] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-800b6c6b-ddbd-4ed9-8a45-06eba7149fe6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.698822] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 867.698822] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480d56-4d68-cb85-37d4-42d07bc6099d" [ 867.698822] env[68279]: _type = "Task" [ 867.698822] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.715758] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480d56-4d68-cb85-37d4-42d07bc6099d, 'name': SearchDatastore_Task, 'duration_secs': 0.010485} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.716071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.716316] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.716549] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.716704] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.716939] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.717361] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0aa55d63-7f2f-4f5b-b1f3-642927fafe6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.728275] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.728275] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.728502] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9141860c-3e8d-4a7d-91b5-71d381455e93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.733470] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 867.733470] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9569c-f12b-a3cc-a785-dabbfafa4213" [ 867.733470] env[68279]: _type = "Task" [ 867.733470] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.741546] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9569c-f12b-a3cc-a785-dabbfafa4213, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.742539] env[68279]: DEBUG nova.network.neutron [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Updated VIF entry in instance network info cache for port 5827d8c9-58c9-41f7-b9aa-e5d2ca91382f. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.742705] env[68279]: DEBUG nova.network.neutron [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Updating instance_info_cache with network_info: [{"id": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "address": "fa:16:3e:cf:70:f7", "network": {"id": "c598ce9b-f69d-42af-965f-a4722b810c0e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-713894145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d06c0b786e2546b085d914e7d010de6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5827d8c9-58", "ovs_interfaceid": "5827d8c9-58c9-41f7-b9aa-e5d2ca91382f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.832995] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] 
Successfully updated port: 20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.895029] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963165, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.932478] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2b869d7-cd0f-4fa3-af63-c80ff7f2a6af tempest-ServersTestBootFromVolume-1615053590 tempest-ServersTestBootFromVolume-1615053590-project-member] Lock "e5565f0d-ed60-4ac8-bba1-ab46b337dd90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.133s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.008744] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.115392] env[68279]: DEBUG oslo_vmware.api [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176868} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.115715] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.115917] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.116188] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.136892] env[68279]: INFO nova.scheduler.client.report [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted allocations for instance eccc5882-2c8b-456d-bbd2-d9ed22777a77 [ 868.244145] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9569c-f12b-a3cc-a785-dabbfafa4213, 'name': SearchDatastore_Task, 'duration_secs': 0.009702} completed 
successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.245674] env[68279]: DEBUG oslo_concurrency.lockutils [req-9b3f93ef-bd91-4a71-8bff-63216c96cb49 req-d2a9c62d-22f0-4337-80a5-f42c2adb47c7 service nova] Releasing lock "refresh_cache-8aa8c866-4807-4a06-904e-53c149047d65" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.246028] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47f8bc02-1c42-4e08-a5aa-98fe123fdf86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.252067] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 868.252067] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52577f53-98ce-16f2-62ad-021d3bcbc2cd" [ 868.252067] env[68279]: _type = "Task" [ 868.252067] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.260946] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52577f53-98ce-16f2-62ad-021d3bcbc2cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.337931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.337931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.337931] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.395268] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963165, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.408663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8bf346f6-f583-411d-bf10-7c4775842566 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.409909] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.648s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.410153] env[68279]: DEBUG nova.objects.instance [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lazy-loading 'resources' on Instance uuid f807e45c-76d8-46a6-a30b-011e7b8df6a4 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.642111] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.651718] env[68279]: DEBUG nova.compute.manager [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.652974] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf6c577-b824-4f19-b60d-085171c03497 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.763488] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52577f53-98ce-16f2-62ad-021d3bcbc2cd, 'name': SearchDatastore_Task, 'duration_secs': 0.012227} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.764337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.764337] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 8aa8c866-4807-4a06-904e-53c149047d65/8aa8c866-4807-4a06-904e-53c149047d65.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.764499] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-853fd0de-a1f8-4d7b-80f6-6ca9e06a3d1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.772246] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 868.772246] env[68279]: value = "task-2963168" [ 868.772246] env[68279]: _type = "Task" [ 868.772246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.782704] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.900436] env[68279]: DEBUG oslo_vmware.api [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963165, 'name': PowerOnVM_Task, 'duration_secs': 1.262382} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.900841] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.901361] env[68279]: INFO nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Took 9.30 seconds to spawn the instance on the hypervisor. 
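The spawn of instance 8aa8c866-4807-4a06-904e-53c149047d65 above follows the VMware image-cache flow: search the datastore for a cached copy of image 01e502b7-2447-4972-9fe7-fd69f76ef71f, create the devstack-image-cache_base directory if missing, then copy the cached VMDK into the instance's own directory, all while holding a lock on the cached file. The following is a simplified sketch of the branch taken above, under stated assumptions; search_datastore(), make_directory() and copy_virtual_disk() are hypothetical helpers, not oslo.vmware or Nova APIs, and the per-name lock merely stands in for oslo.concurrency lockutils.

    import threading
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def lock(name):
        # Hypothetical per-resource lock, standing in for oslo.concurrency lockutils.
        with _locks.setdefault(name, threading.Lock()):
            yield

    def copy_cached_image(image_id, instance_uuid, datastore,
                          search_datastore, make_directory, copy_virtual_disk):
        """Sketch: ensure a cached image VMDK exists, then copy it for the instance."""
        cache_dir = f"[{datastore}] devstack-image-cache_base"
        cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
        target_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        with lock(cached_vmdk):
            if not search_datastore(cached_vmdk):
                make_directory(cache_dir)
                # In the real flow the image would be fetched from Glance here;
                # the log above takes the "already cached" branch instead.
                raise NotImplementedError("image fetch omitted in this sketch")
            copy_virtual_disk(cached_vmdk, target_vmdk)
        return target_vmdk

In the log this corresponds to the SearchDatastore_Task calls, the MakeDirectory on devstack-image-cache_base, and finally the CopyVirtualDisk_Task (task-2963168) that copies the cached VMDK to [datastore2] 8aa8c866-.../8aa8c866-....vmdk.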
[ 868.901844] env[68279]: DEBUG nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.903628] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84988ea0-7b0c-4c68-921f-7a820e43ac1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.959141] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.019630] env[68279]: DEBUG nova.compute.manager [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Received event network-vif-plugged-20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.019913] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Acquiring lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.020071] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.020236] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.020400] env[68279]: DEBUG nova.compute.manager [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] No waiting events found dispatching network-vif-plugged-20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.020558] env[68279]: WARNING nova.compute.manager [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Received unexpected event network-vif-plugged-20f8cd48-6520-4f63-866e-b8e360f8b818 for instance with vm_state building and task_state spawning. 
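The network-vif-plugged and network-changed entries above show the compute manager's external-event handling: when Neutron reports an event, the manager pops any waiter registered for that instance and event; if nothing is waiting (as here, while instance 7d15a05a-f827-40a7-b182-5d2b553481c7 is still building), the event is logged as unexpected. Below is a minimal sketch of that pop-or-warn behaviour, with a plain dict standing in for Nova's InstanceEvents registry; the function and registry names are hypothetical.

    import logging

    LOG = logging.getLogger(__name__)

    # Hypothetical registry: maps (instance_uuid, event_name) -> callback.
    _waiters = {}

    def register_waiter(instance_uuid, event_name, callback):
        _waiters[(instance_uuid, event_name)] = callback

    def dispatch_external_event(instance_uuid, event_name):
        """Pop a waiting callback for the event, or warn that it was unexpected."""
        callback = _waiters.pop((instance_uuid, event_name), None)
        if callback is None:
            # Corresponds to: "No waiting events found dispatching ..." followed by
            # "Received unexpected event ... for instance with vm_state building".
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        callback()
        return True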
[ 869.020715] env[68279]: DEBUG nova.compute.manager [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Received event network-changed-20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.020867] env[68279]: DEBUG nova.compute.manager [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Refreshing instance network info cache due to event network-changed-20f8cd48-6520-4f63-866e-b8e360f8b818. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 869.021181] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Acquiring lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.166048] env[68279]: INFO nova.compute.manager [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] instance snapshotting [ 869.171821] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7146b4e8-1cf8-41af-8579-e943a3fecbd6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.196363] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157f7afb-a7db-47c7-a436-ae657d358f6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.233876] env[68279]: DEBUG nova.network.neutron [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updating instance_info_cache with network_info: [{"id": "20f8cd48-6520-4f63-866e-b8e360f8b818", "address": "fa:16:3e:21:25:63", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f8cd48-65", "ovs_interfaceid": "20f8cd48-6520-4f63-866e-b8e360f8b818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.288641] env[68279]: DEBUG oslo_vmware.api [None 
req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963168, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.309394] env[68279]: DEBUG nova.compute.manager [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.309394] env[68279]: DEBUG nova.compute.manager [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing instance network info cache due to event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 869.309394] env[68279]: DEBUG oslo_concurrency.lockutils [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.309394] env[68279]: DEBUG oslo_concurrency.lockutils [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.309727] env[68279]: DEBUG nova.network.neutron [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.428333] env[68279]: INFO nova.compute.manager [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Took 56.90 seconds to build instance. 
[ 869.567756] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d956cfe2-74f3-4953-ad51-284874fe62f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.575621] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d029330e-6394-40bf-944b-0c50bd634f1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.615725] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d477534f-954b-4353-9215-aa7d4f9e7165 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.628042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23b40de-989d-4ead-bc77-b84a9cfe4df7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.646595] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.657186] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 869.658491] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb5abeb-6849-4634-9b2a-f1bee43eca8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.665248] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 869.665452] env[68279]: ERROR oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk due to incomplete transfer. 
[ 869.665919] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6675a031-debb-4643-ad08-ef6a8b8db681 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.672888] env[68279]: DEBUG oslo_vmware.rw_handles [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e298f3-1740-0e72-ce7b-caf96ad9b1b9/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 869.673421] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Uploaded image cfebc35c-c617-4fc2-9a37-bdc659062679 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 869.674803] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 869.675508] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2ffa16a5-5dfa-4a2c-95d3-0d685fdbfd43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.682930] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 869.682930] env[68279]: value = "task-2963169" [ 869.682930] env[68279]: _type = "Task" [ 869.682930] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.693237] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963169, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.713243] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 869.713243] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-23f0ba58-58a0-45cb-9519-23012c2a33a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.720517] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 869.720517] env[68279]: value = "task-2963170" [ 869.720517] env[68279]: _type = "Task" [ 869.720517] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.728802] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963170, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.739610] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.740018] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Instance network_info: |[{"id": "20f8cd48-6520-4f63-866e-b8e360f8b818", "address": "fa:16:3e:21:25:63", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f8cd48-65", "ovs_interfaceid": "20f8cd48-6520-4f63-866e-b8e360f8b818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 869.740430] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Acquired lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.740933] env[68279]: DEBUG nova.network.neutron [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Refreshing network info cache for port 20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.742822] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:25:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3fca0ab6-cc80-429f-9117-885f170135b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20f8cd48-6520-4f63-866e-b8e360f8b818', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.752374] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.753698] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.753971] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0f06dc7-d5a9-4a66-bbc8-16c4b62f514a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.775437] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.775437] env[68279]: value = "task-2963171" [ 869.775437] env[68279]: _type = "Task" [ 869.775437] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.788687] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.609387} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.792266] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 8aa8c866-4807-4a06-904e-53c149047d65/8aa8c866-4807-4a06-904e-53c149047d65.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.792502] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.792716] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963171, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.792971] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be77d1f0-86d3-4c51-a6ba-95836214432f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.799305] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 869.799305] env[68279]: value = "task-2963172" [ 869.799305] env[68279]: _type = "Task" [ 869.799305] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.809047] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963172, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.930623] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e8597a6-a1fa-464a-9f5f-8366bf7e3248 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.726s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.168577] env[68279]: ERROR nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [req-fc6ce358-d243-4dfe-9cd3-1edf2ebd9ad9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fc6ce358-d243-4dfe-9cd3-1edf2ebd9ad9"}]} [ 870.185084] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 870.195857] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963169, 'name': Destroy_Task, 'duration_secs': 0.346376} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.199025] env[68279]: DEBUG nova.network.neutron [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updated VIF entry in instance network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.199025] env[68279]: DEBUG nova.network.neutron [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap15317896-8b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.199025] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Destroyed the VM [ 870.199025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 870.199428] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 870.203022] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.203022] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5e884d51-91f6-49c5-b0f0-c190b50171f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.207982] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 870.207982] env[68279]: value = "task-2963173" [ 870.207982] env[68279]: _type = "Task" [ 870.207982] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.217339] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963173, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.218332] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 870.229833] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963170, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.241438] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 870.289972] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963171, 'name': CreateVM_Task, 'duration_secs': 0.416762} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.294982] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.295967] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.296234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.296636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 870.300342] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3adc6707-1f09-4057-8eec-b033d6dab4a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.308065] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 870.308065] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d696a9-538b-5edf-e60a-7771ed02b8e5" [ 870.308065] env[68279]: _type = "Task" [ 870.308065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.318275] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068296} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.319097] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.320773] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1230d62-d205-4c1f-b3f1-19bc1e1e2ae3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.325884] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d696a9-538b-5edf-e60a-7771ed02b8e5, 'name': SearchDatastore_Task, 'duration_secs': 0.011717} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.326493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.326818] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.326959] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.327127] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.327308] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.327609] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e461265-7c78-49fe-9433-83ac5802b144 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.347665] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 8aa8c866-4807-4a06-904e-53c149047d65/8aa8c866-4807-4a06-904e-53c149047d65.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.350777] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8baf85c7-dc4c-4b11-82b2-c2d958bce2ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.369821] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.369821] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.372586] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c9bd5dd-7c89-41cb-bfe5-443ffa78dc7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.375977] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 870.375977] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5231ba31-3581-041d-53d5-7b50b3211afb" [ 870.375977] env[68279]: _type = "Task" [ 870.375977] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.379670] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 870.379670] env[68279]: value = "task-2963174" [ 870.379670] env[68279]: _type = "Task" [ 870.379670] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.387267] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.387267] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5231ba31-3581-041d-53d5-7b50b3211afb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.392630] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963174, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.599782] env[68279]: DEBUG nova.network.neutron [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updated VIF entry in instance network info cache for port 20f8cd48-6520-4f63-866e-b8e360f8b818. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.600166] env[68279]: DEBUG nova.network.neutron [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updating instance_info_cache with network_info: [{"id": "20f8cd48-6520-4f63-866e-b8e360f8b818", "address": "fa:16:3e:21:25:63", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f8cd48-65", "ovs_interfaceid": "20f8cd48-6520-4f63-866e-b8e360f8b818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.703969] env[68279]: DEBUG oslo_concurrency.lockutils [req-4f7be2e7-26aa-477e-b92f-5c43ef973d16 req-57631834-b227-4a21-9a8e-f1dc22292a11 service nova] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.726035] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963173, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.735395] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963170, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.895667] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5231ba31-3581-041d-53d5-7b50b3211afb, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.895667] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963174, 'name': ReconfigVM_Task, 'duration_secs': 0.446438} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.896899] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 8aa8c866-4807-4a06-904e-53c149047d65/8aa8c866-4807-4a06-904e-53c149047d65.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.897242] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31420d7a-03d2-4694-b981-5ad3b609ea24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.899633] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87849e47-b62d-4583-8138-c7b0d0bd855a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.907091] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 870.907091] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f07a5d-cfc6-3a1c-1906-3f4ca8b245a9" [ 870.907091] env[68279]: _type = "Task" [ 870.907091] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.908604] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 870.908604] env[68279]: value = "task-2963175" [ 870.908604] env[68279]: _type = "Task" [ 870.908604] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.926574] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f07a5d-cfc6-3a1c-1906-3f4ca8b245a9, 'name': SearchDatastore_Task} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.926744] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963175, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.926981] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.927209] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/7d15a05a-f827-40a7-b182-5d2b553481c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.927537] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-913949ff-fb0b-48d8-bf11-efeea77f21d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.936595] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 870.936595] env[68279]: value = "task-2963176" [ 870.936595] env[68279]: _type = "Task" [ 870.936595] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.945487] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963176, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.972143] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d084c4e4-a347-482f-836f-8c399a8e1921 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.980226] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b80cad4-523c-4957-adcb-9e0122f0d919 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.015670] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be1011-39f1-46a2-8b00-ce3ae87385e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.023944] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f964db8e-f96a-441a-a69a-b45d1ec24c0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.039998] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.075637] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7480bd8a-2c3f-4a43-b0e1-e831a5e72498 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.083664] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Suspending the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 871.084513] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-acab9936-6923-4aa6-bb5b-a269cdd51523 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.089341] env[68279]: DEBUG oslo_vmware.api [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] Waiting for the task: (returnval){ [ 871.089341] env[68279]: value = "task-2963177" [ 871.089341] env[68279]: _type = "Task" [ 871.089341] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.098086] env[68279]: DEBUG oslo_vmware.api [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] Task: {'id': task-2963177, 'name': SuspendVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.103165] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2313a79-7655-4679-ad2f-776bf5cd2ad2 req-210fdf43-50de-45f3-a234-fd3f51ed6295 service nova] Releasing lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.118792] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.119118] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.222166] env[68279]: DEBUG oslo_vmware.api [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963173, 'name': RemoveSnapshot_Task, 'duration_secs': 0.782313} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.222457] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 871.222693] env[68279]: INFO nova.compute.manager [None req-ca31bb92-6bfd-44f7-a1a8-a2cb3fd70737 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Took 13.45 seconds to snapshot the instance on the hypervisor. [ 871.234364] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963170, 'name': CreateSnapshot_Task, 'duration_secs': 1.322065} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.234656] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 871.235486] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b0b20e-8fc8-438a-a8b5-9327df17a374 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.422635] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963175, 'name': Rename_Task, 'duration_secs': 0.171027} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.422948] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.423234] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ebbb8e3-9af9-49d9-ad17-ed6b150813da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.430272] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 871.430272] env[68279]: value = "task-2963178" [ 871.430272] env[68279]: _type = "Task" [ 871.430272] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.439020] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.446572] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963176, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.564798] env[68279]: ERROR nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [req-566f9778-2bbc-41ac-ba6c-166d3691c3c5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-566f9778-2bbc-41ac-ba6c-166d3691c3c5"}]} [ 871.584596] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 871.600136] env[68279]: DEBUG oslo_vmware.api [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] Task: {'id': task-2963177, 'name': SuspendVM_Task} progress is 58%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.600785] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 871.601066] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.613793] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing aggregate 
associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 871.621793] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.636527] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 871.755769] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 871.759510] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b1832882-963c-4a08-a279-aeef9677065b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.768632] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 871.768632] env[68279]: value = "task-2963179" [ 871.768632] env[68279]: _type = "Task" [ 871.768632] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.777605] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.948584] env[68279]: DEBUG oslo_vmware.api [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963178, 'name': PowerOnVM_Task, 'duration_secs': 0.49713} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.949294] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.949734] env[68279]: INFO nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Took 7.50 seconds to spawn the instance on the hypervisor. [ 871.949927] env[68279]: DEBUG nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 871.950882] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b60ae-bb68-487e-a2ea-83e49c848ac1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.962467] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515603} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.962467] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/7d15a05a-f827-40a7-b182-5d2b553481c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.962467] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.962467] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d55d866a-b657-462a-86a8-568628c8199b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.972568] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 871.972568] env[68279]: value = "task-2963180" [ 871.972568] env[68279]: _type = "Task" [ 871.972568] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.985307] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963180, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.099542] env[68279]: DEBUG oslo_vmware.api [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] Task: {'id': task-2963177, 'name': SuspendVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.143758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.250156] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fc328b-4e20-4e9f-9a8c-ef09f09b3be6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.257863] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611d8a68-9629-415c-9feb-d9bba8ff9bb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.291907] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1607de-eb4a-465a-9303-bc8bf197bd4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.304075] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b589c515-f5a1-4223-8948-b8e5e8f477f0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.308134] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.319408] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.482838] env[68279]: INFO nova.compute.manager [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Took 54.20 seconds to build instance. [ 872.489063] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08098} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.489303] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.490234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64989cde-c80f-4328-9d04-20cc7d05647a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.513600] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/7d15a05a-f827-40a7-b182-5d2b553481c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.514556] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e427db1-cbec-43f8-87e9-d374cb5f3865 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.535124] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 872.535124] env[68279]: value = "task-2963181" [ 872.535124] env[68279]: _type = "Task" [ 872.535124] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.544337] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963181, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.600771] env[68279]: DEBUG oslo_vmware.api [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] Task: {'id': task-2963177, 'name': SuspendVM_Task, 'duration_secs': 1.0352} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.601086] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Suspended the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 872.601275] env[68279]: DEBUG nova.compute.manager [None req-f6d21acd-7a9d-4539-a5a1-161d1e427135 tempest-ServersAdminNegativeTestJSON-1102963569 tempest-ServersAdminNegativeTestJSON-1102963569-project-admin] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.602568] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3739a0fd-a520-4000-8eae-61d26b04fa7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.798644] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.864026] env[68279]: DEBUG nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 872.864026] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 93 to 94 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 872.864026] env[68279]: DEBUG nova.compute.provider_tree [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.985722] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50d79fab-daa4-4e30-9a32-d53902f8ce3d tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.094s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.047282] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963181, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.300548] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.369083] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.959s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.374369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.590s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.374369] env[68279]: INFO nova.compute.claims [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.382038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.382038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.382038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.382038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.382038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.382397] env[68279]: INFO nova.compute.manager [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Terminating instance [ 873.399032] env[68279]: INFO nova.scheduler.client.report [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleted allocations for instance f807e45c-76d8-46a6-a30b-011e7b8df6a4 [ 873.546640] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963181, 'name': ReconfigVM_Task, 'duration_secs': 0.583261} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.546938] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/7d15a05a-f827-40a7-b182-5d2b553481c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.547266] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68279) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 873.550088] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-68d5ab28-522f-4dd2-90e7-4c30b4348368 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.555311] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 873.555311] env[68279]: value = "task-2963182" [ 873.555311] env[68279]: _type = "Task" [ 873.555311] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.564602] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963182, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.802543] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.888366] env[68279]: DEBUG nova.compute.manager [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 873.888626] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.889799] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5352886-c172-444d-853d-00716005a27e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.898410] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.898721] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7287db2-321e-41b4-8fd7-7cb997f44da0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.907315] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 873.907315] env[68279]: value = "task-2963183" [ 873.907315] env[68279]: _type = "Task" [ 873.907315] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.911512] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c7151c79-23b5-4c3e-932b-88e74c153c73 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "f807e45c-76d8-46a6-a30b-011e7b8df6a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.666s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.921911] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2963183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.067628] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963182, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.061831} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.070028] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68279) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 874.070028] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d27e0b-f207-49f6-9a74-a3999f2f09f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.098741] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/ephemeral_0.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.099300] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e97d0a76-5028-4c2a-aa6d-41d7ee41176f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.117501] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 874.117501] env[68279]: value = "task-2963184" [ 874.117501] env[68279]: _type = "Task" [ 874.117501] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.127989] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963184, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.168138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.168138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.168138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.168138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.168138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.169269] env[68279]: INFO nova.compute.manager [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Terminating instance [ 874.303123] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963179, 'name': CloneVM_Task, 'duration_secs': 2.225391} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.303446] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Created linked-clone VM from snapshot [ 874.304162] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4affd26-dd3a-4ab2-8ba2-d7ae9f5123aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.311869] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Uploading image f3e10b9e-959d-4ce6-b0eb-9b57ccdf559b {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 874.337393] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 874.337393] env[68279]: value = "vm-594632" [ 874.337393] env[68279]: _type = "VirtualMachine" [ 874.337393] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 874.337699] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-95299b5f-f367-4b2c-881f-a057fd197ebc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.344543] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease: (returnval){ [ 874.344543] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f31e-8ecc-fe15-3995-cda575639d68" [ 874.344543] env[68279]: _type = "HttpNfcLease" [ 874.344543] env[68279]: } obtained for exporting VM: (result){ [ 874.344543] env[68279]: value = "vm-594632" [ 874.344543] env[68279]: _type = "VirtualMachine" [ 874.344543] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 874.344768] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the lease: (returnval){ [ 874.344768] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f31e-8ecc-fe15-3995-cda575639d68" [ 874.344768] env[68279]: _type = "HttpNfcLease" [ 874.344768] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 874.351998] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 874.351998] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f31e-8ecc-fe15-3995-cda575639d68" [ 874.351998] env[68279]: _type = "HttpNfcLease" [ 874.351998] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 874.418175] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2963183, 'name': PowerOffVM_Task, 'duration_secs': 0.225869} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.418474] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.418629] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.421482] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ceba7534-08c1-4fd9-98e0-50fc5e184146 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.487437] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.487810] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.488369] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Deleting the datastore file [datastore1] b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.488969] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-955db19f-c3da-49ce-b7c0-21bf2145f7fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.495602] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for the task: (returnval){ [ 874.495602] env[68279]: value = "task-2963187" [ 874.495602] env[68279]: _type = "Task" [ 874.495602] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.507654] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2963187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.629173] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963184, 'name': ReconfigVM_Task, 'duration_secs': 0.395257} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.629568] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7/ephemeral_0.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.630128] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03f59685-b859-4b57-be02-cddc41475a35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.636651] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 874.636651] env[68279]: value = "task-2963188" [ 874.636651] env[68279]: _type = "Task" [ 874.636651] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.644732] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963188, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.675197] env[68279]: DEBUG nova.compute.manager [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.675197] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.675197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797664b2-831e-4457-a235-481651c35bc4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.685962] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.686386] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf7af5cd-161f-4f57-aa2b-5b26bcf4b65d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.693842] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 874.693842] env[68279]: value = "task-2963189" [ 874.693842] env[68279]: _type = "Task" [ 874.693842] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.705389] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.858277] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 874.858277] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f31e-8ecc-fe15-3995-cda575639d68" [ 874.858277] env[68279]: _type = "HttpNfcLease" [ 874.858277] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 874.858560] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 874.858560] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f31e-8ecc-fe15-3995-cda575639d68" [ 874.858560] env[68279]: _type = "HttpNfcLease" [ 874.858560] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 874.859479] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5028443d-24f0-484f-9eda-99cef72a41f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.867436] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 874.867761] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 874.946640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.946906] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.006065] env[68279]: DEBUG oslo_vmware.api [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Task: {'id': task-2963187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164868} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.008717] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.008958] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.009147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.009328] env[68279]: INFO nova.compute.manager [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 875.009564] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.009955] env[68279]: DEBUG nova.compute.manager [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.010079] env[68279]: DEBUG nova.network.neutron [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.034984] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ee51db64-3116-4f84-af45-674a6a2d3b5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.127686] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6b575e-1c50-41b9-9c4e-a96c4eec070c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.141640] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809dbec0-095c-41b4-a15a-e9fd945d5cfa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.150291] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963188, 'name': Rename_Task, 'duration_secs': 0.212669} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.174460] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.175161] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a17ac39-b9ec-478c-94ed-80e4c1ea69a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.179025] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fb0010-ec73-49de-889b-3c397a010f6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.190035] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3891a2b8-5f25-4055-958c-aab478f15711 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.190872] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 875.190872] env[68279]: value = "task-2963190" [ 875.190872] env[68279]: _type = "Task" [ 875.190872] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.204264] env[68279]: DEBUG nova.compute.provider_tree [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.214789] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963190, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.220788] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963189, 'name': PowerOffVM_Task, 'duration_secs': 0.242859} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.220788] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.220788] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.220788] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-175e9550-7b97-4b08-983d-86a77b7a39a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.295026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.295026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.295183] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleting the datastore file [datastore1] 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.295919] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48f92706-6fc8-4646-abaa-6fe94cd80da9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.305178] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for the task: (returnval){ [ 875.305178] env[68279]: value = "task-2963192" [ 875.305178] env[68279]: _type = "Task" [ 875.305178] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.315665] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.452325] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.556629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "8aa8c866-4807-4a06-904e-53c149047d65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.556629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.556629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "8aa8c866-4807-4a06-904e-53c149047d65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.556629] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.557354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.562397] env[68279]: INFO nova.compute.manager [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Terminating instance [ 875.703098] env[68279]: DEBUG nova.compute.manager [req-052b7eda-beff-4c70-a040-d4edacdfee80 req-a03884f7-0973-42c3-9347-f9be708c7b44 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Received event network-vif-deleted-bfea9973-01ad-4d9c-a033-e6abdbcd8c3c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.703962] env[68279]: INFO nova.compute.manager [req-052b7eda-beff-4c70-a040-d4edacdfee80 req-a03884f7-0973-42c3-9347-f9be708c7b44 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Neutron deleted interface bfea9973-01ad-4d9c-a033-e6abdbcd8c3c; detaching it from the instance and deleting it from the info cache [ 875.704394] env[68279]: DEBUG nova.network.neutron [req-052b7eda-beff-4c70-a040-d4edacdfee80 
req-a03884f7-0973-42c3-9347-f9be708c7b44 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.708804] env[68279]: DEBUG nova.scheduler.client.report [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.723852] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963190, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.814386] env[68279]: DEBUG oslo_vmware.api [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Task: {'id': task-2963192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349319} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.814741] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.815117] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.815324] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.815514] env[68279]: INFO nova.compute.manager [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Took 1.14 seconds to destroy the instance on the hypervisor. [ 875.815775] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.815984] env[68279]: DEBUG nova.compute.manager [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.816119] env[68279]: DEBUG nova.network.neutron [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.988025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.070539] env[68279]: DEBUG nova.compute.manager [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 876.071471] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.072464] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968c3cb6-dd63-4ed4-8b12-cabd39681579 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.082435] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.082667] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4c19c01-e7f9-47c6-9d3b-a9daeb6218f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.087017] env[68279]: DEBUG nova.network.neutron [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.091303] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 876.091303] env[68279]: value = "task-2963193" [ 876.091303] env[68279]: _type = "Task" [ 876.091303] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.101715] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.150426] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.150832] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.151221] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.151445] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.151656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.155949] env[68279]: INFO nova.compute.manager [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Terminating instance [ 876.202527] env[68279]: DEBUG oslo_vmware.api [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963190, 'name': PowerOnVM_Task, 'duration_secs': 0.666602} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.202793] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.202996] env[68279]: INFO nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Took 8.56 seconds to spawn the instance on the hypervisor. [ 876.203191] env[68279]: DEBUG nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.204068] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca19bb6-df30-4449-a23e-3f97eb90baa8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.208843] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cd8ad46-c091-4fb2-b876-57717c1dd3f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.223317] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7db1c1f-b106-4fa0-ab7c-71bf23fffc43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.235390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.864s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.236313] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 876.239194] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.420s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.241569] env[68279]: DEBUG nova.objects.instance [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lazy-loading 'resources' on Instance uuid 0daf31be-c547-46ae-aa91-f99e191e1c76 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 876.269801] env[68279]: DEBUG nova.compute.manager [req-052b7eda-beff-4c70-a040-d4edacdfee80 req-a03884f7-0973-42c3-9347-f9be708c7b44 service nova] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Detach interface failed, port_id=bfea9973-01ad-4d9c-a033-e6abdbcd8c3c, reason: Instance b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 876.590482] env[68279]: INFO nova.compute.manager [-] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Took 1.58 seconds to deallocate network for instance. [ 876.606093] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963193, 'name': PowerOffVM_Task, 'duration_secs': 0.18025} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.606663] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.606853] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.607245] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92def343-c208-444b-b47e-d7caeef4c8e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.626352] env[68279]: DEBUG nova.network.neutron [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.663591] env[68279]: DEBUG nova.compute.manager [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 876.663591] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.663591] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483cdbf0-e4f9-4142-8a94-ffbb1040604c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.672910] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.673496] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3766c29-ed23-446a-af8d-517f48d877e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.676018] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.676237] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.676420] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Deleting the datastore file [datastore2] 8aa8c866-4807-4a06-904e-53c149047d65 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.677222] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81e01a86-4dea-4b05-b253-e1254f1fbb58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.683574] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for the task: (returnval){ [ 876.683574] env[68279]: value = "task-2963196" [ 876.683574] env[68279]: _type = "Task" [ 876.683574] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.695393] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963196, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.725799] env[68279]: INFO nova.compute.manager [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Took 58.30 seconds to build instance. [ 876.743601] env[68279]: DEBUG nova.compute.utils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.747194] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.747384] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.751516] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.751516] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.751674] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleting the datastore file [datastore1] 777eda1c-ca3f-4db0-b6b9-5901de5781ff {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.752602] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38b98c27-295c-4a3e-a172-8948f2e376cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.761116] env[68279]: DEBUG oslo_vmware.api [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 876.761116] env[68279]: value = "task-2963197" [ 876.761116] env[68279]: _type = "Task" [ 876.761116] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.772222] env[68279]: DEBUG oslo_vmware.api [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.779450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.779829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.779913] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.780109] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.780286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.782991] env[68279]: INFO nova.compute.manager [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Terminating instance [ 876.798956] env[68279]: DEBUG nova.policy [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 877.108345] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.121199] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Successfully created port: 040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.132832] env[68279]: INFO nova.compute.manager [-] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Took 1.32 seconds to deallocate network for instance. [ 877.196411] env[68279]: DEBUG oslo_vmware.api [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Task: {'id': task-2963196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206404} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.196411] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.196411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.196411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.196411] env[68279]: INFO nova.compute.manager [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Took 1.12 seconds to destroy the instance on the hypervisor. [ 877.196411] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.196411] env[68279]: DEBUG nova.compute.manager [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.196411] env[68279]: DEBUG nova.network.neutron [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.228076] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2becca61-b4e3-461e-b386-416373d2d8fe tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.240s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.252365] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 877.274291] env[68279]: DEBUG oslo_vmware.api [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263513} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.274584] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.274792] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.275009] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.275607] env[68279]: INFO nova.compute.manager [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Took 0.61 seconds to destroy the instance on the hypervisor. 
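
The destroy sequences traced above all follow the same shape: invoke a vSphere task (PowerOffVM_Task, then UnregisterVM, then FileManager.DeleteDatastoreFile_Task) through the oslo.vmware session and poll it until completion. The following is only a minimal illustrative sketch of that pattern, not Nova's actual vmops/ds_util code; `session` is assumed to be an already-established oslo_vmware.api.VMwareAPISession, `vm_ref` an existing VirtualMachine managed-object reference, and `file_manager`/`datacenter_ref`/`ds_path` placeholder references for the FileManager MoRef, the datacenter, and the datastore path being deleted.

```python
# Sketch of the invoke-then-wait pattern visible in the log above.
# wait_for_task() performs the periodic progress polling logged as _poll_task.
from oslo_vmware import exceptions as vexc


def power_off_and_delete(session, vm_ref, file_manager, datacenter_ref, ds_path):
    # PowerOffVM_Task returns a task reference; wait_for_task blocks until it finishes.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain method call, not a task, which is why the log shows
    # no task id between "Unregistering the VM" and "Unregistered the VM".
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the backing files is again a vCenter task.
    try:
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # File already gone; this sketch simply treats that as success.
        pass
```
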
[ 877.275895] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.276135] env[68279]: DEBUG nova.compute.manager [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.276251] env[68279]: DEBUG nova.network.neutron [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.288419] env[68279]: DEBUG nova.compute.manager [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.288730] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.289924] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddbb636-72f7-4698-a23b-9fbc88fb2a51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.300846] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.301364] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d332dab6-2a46-4a5b-b8e1-cdfc7d358c29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.309847] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 877.309847] env[68279]: value = "task-2963198" [ 877.309847] env[68279]: _type = "Task" [ 877.309847] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.320434] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2963198, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.375152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679bb172-e9f9-4d12-83d5-f3a590e308b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.383768] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5b0afa-9e28-40f8-9245-b464e1bfb3c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.417183] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e8028-05bc-4625-9ae3-23c1f48e1d45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.425181] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a787cae0-065e-44d5-8077-c732ebf4eea4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.439746] env[68279]: DEBUG nova.compute.provider_tree [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.640181] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.822642] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2963198, 'name': PowerOffVM_Task, 'duration_secs': 0.233645} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.822916] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.823141] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.823474] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e764aaf5-6ee6-47a1-b628-2128a2323595 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.888818] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 877.889227] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 877.889366] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Deleting the datastore file [datastore2] 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 877.889569] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da5433e3-7de5-4dc0-b0e4-0807d72db185 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.897918] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for the task: (returnval){ [ 877.897918] env[68279]: value = "task-2963200" [ 877.897918] env[68279]: _type = "Task" [ 877.897918] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.908843] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2963200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.963646] env[68279]: ERROR nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [req-bc8a0a27-3be2-4ba3-af67-3523aa95a9f9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bc8a0a27-3be2-4ba3-af67-3523aa95a9f9"}]} [ 877.980139] env[68279]: DEBUG nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 877.996772] env[68279]: DEBUG nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 877.997063] env[68279]: DEBUG nova.compute.provider_tree [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 152, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.011737] env[68279]: DEBUG nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 878.039296] env[68279]: DEBUG nova.scheduler.client.report [None 
req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 878.264592] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 878.267562] env[68279]: DEBUG nova.compute.manager [req-1a77aace-351a-4ae3-af5b-e5553bbf4c9e req-4a9cac39-038e-4b09-964c-921dbd1a9548 service nova] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Received event network-vif-deleted-75ef3733-1b15-4793-9073-f9964cbea45d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.295515] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.295764] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.295920] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.296204] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.296394] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.296579] env[68279]: DEBUG nova.virt.hardware [None 
req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.296795] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.296951] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.297159] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.297293] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.297470] env[68279]: DEBUG nova.virt.hardware [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.298413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae7c930-e0e0-4ad3-b49e-f21723a065aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.306964] env[68279]: DEBUG nova.network.neutron [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.310432] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fde8574-3bb0-41da-a493-b546b60ad2b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.396113] env[68279]: DEBUG nova.compute.manager [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Received event network-changed-20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.396539] env[68279]: DEBUG nova.compute.manager [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Refreshing instance network info cache due to event network-changed-20f8cd48-6520-4f63-866e-b8e360f8b818. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 878.396539] env[68279]: DEBUG oslo_concurrency.lockutils [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] Acquiring lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.396711] env[68279]: DEBUG oslo_concurrency.lockutils [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] Acquired lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.396788] env[68279]: DEBUG nova.network.neutron [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Refreshing network info cache for port 20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.410581] env[68279]: DEBUG oslo_vmware.api [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Task: {'id': task-2963200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31848} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.411593] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.411593] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.411593] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.411593] env[68279]: INFO nova.compute.manager [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 878.411593] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.412070] env[68279]: DEBUG nova.compute.manager [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 878.412070] env[68279]: DEBUG nova.network.neutron [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.544260] env[68279]: DEBUG nova.network.neutron [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.632154] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d15e7c-b51c-4909-b52a-df43e3ef03ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.638710] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1d5326-ed4f-4d6e-8bbf-0f1b3ffacbfa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.676025] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298cf82f-0d31-4b09-9cab-dc1f975cac31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.679857] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d338059-66f8-44d1-9d8f-7bacddb2b82b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.695520] env[68279]: DEBUG nova.compute.provider_tree [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.810274] env[68279]: INFO nova.compute.manager [-] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Took 1.61 seconds to deallocate network for instance. [ 879.047560] env[68279]: INFO nova.compute.manager [-] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Took 1.77 seconds to deallocate network for instance. 
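The inventory written to the ProviderTree at 878.695520 is what Placement schedules against: for each resource class the usable amount is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may take. A minimal sketch (plain Python for reading the log, not Nova code) reproducing that arithmetic from the values logged above:

    # Capacity Placement derives from the inventory logged at 878.695520:
    #   capacity = (total - reserved) * allocation_ratio; max_unit caps one allocation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 153},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} allocatable, at most {inv['max_unit']} per allocation")
    # VCPU: 192 allocatable, MEMORY_MB: 196078 allocatable, DISK_GB: 400 allocatable

With allocation_ratio 4.0 the 48 physical VCPUs advertise 192 schedulable units; the generation bump from 95 to 96 recorded just below is the Placement write that accompanies this inventory update.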
[ 879.155315] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Successfully updated port: 040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.212530] env[68279]: DEBUG nova.network.neutron [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updated VIF entry in instance network info cache for port 20f8cd48-6520-4f63-866e-b8e360f8b818. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.212881] env[68279]: DEBUG nova.network.neutron [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updating instance_info_cache with network_info: [{"id": "20f8cd48-6520-4f63-866e-b8e360f8b818", "address": "fa:16:3e:21:25:63", "network": {"id": "4f817a56-71c3-404c-bb8f-08e4925f0c9d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1733186274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e382d6dee334cd2bcf097cbe56f1143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f8cd48-65", "ovs_interfaceid": "20f8cd48-6520-4f63-866e-b8e360f8b818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.233799] env[68279]: DEBUG nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 879.234075] env[68279]: DEBUG nova.compute.provider_tree [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 95 to 96 during operation: update_inventory {{(pid=68279) 
_update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 879.234270] env[68279]: DEBUG nova.compute.provider_tree [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.320855] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.554848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.657773] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.657983] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.658136] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.674444] env[68279]: DEBUG nova.network.neutron [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.715086] env[68279]: DEBUG oslo_concurrency.lockutils [req-0860570c-e34c-4b22-a776-60ff17bbd7f2 req-14dfcead-b509-420f-92ec-407f32eda2cc service nova] Releasing lock "refresh_cache-7d15a05a-f827-40a7-b182-5d2b553481c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.741826] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 
tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.503s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.744518] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.305s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.744668] env[68279]: DEBUG nova.objects.instance [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'resources' on Instance uuid 4021edd3-346e-44e5-9419-38181cc91c6a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.765260] env[68279]: INFO nova.scheduler.client.report [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleted allocations for instance 0daf31be-c547-46ae-aa91-f99e191e1c76 [ 880.178561] env[68279]: INFO nova.compute.manager [-] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Took 1.77 seconds to deallocate network for instance. [ 880.196319] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.275836] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d2620463-5c5f-4316-af19-40fe5da4e131 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.981s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.277803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 51.010s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.277803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.277803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.277919] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.282318] env[68279]: INFO nova.compute.manager [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Terminating instance [ 880.358425] env[68279]: DEBUG nova.network.neutron [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updating instance_info_cache with network_info: [{"id": "040cd84c-473e-4fee-b689-54b1128ae340", "address": "fa:16:3e:7f:ca:00", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap040cd84c-47", "ovs_interfaceid": "040cd84c-473e-4fee-b689-54b1128ae340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.436416] env[68279]: DEBUG nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Received event network-vif-deleted-5827d8c9-58c9-41f7-b9aa-e5d2ca91382f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.436613] env[68279]: DEBUG nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Received event network-vif-plugged-040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.436796] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.437015] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.437178] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.437342] env[68279]: DEBUG nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] No waiting events found dispatching network-vif-plugged-040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.437499] env[68279]: WARNING nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Received unexpected event network-vif-plugged-040cd84c-473e-4fee-b689-54b1128ae340 for instance with vm_state building and task_state spawning. 
[ 880.437650] env[68279]: DEBUG nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Received event network-changed-040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.437792] env[68279]: DEBUG nova.compute.manager [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Refreshing instance network info cache due to event network-changed-040cd84c-473e-4fee-b689-54b1128ae340. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 880.437951] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Acquiring lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.589919] env[68279]: DEBUG nova.compute.manager [req-8c37e531-cfcd-492d-81bb-3ad100a35ae7 req-00d0b8a7-891e-42f3-b9f2-f25d63556929 service nova] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Received event network-vif-deleted-7bd78f37-cfff-4fc2-ae4b-525b7f226259 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.590266] env[68279]: DEBUG nova.compute.manager [req-8c37e531-cfcd-492d-81bb-3ad100a35ae7 req-00d0b8a7-891e-42f3-b9f2-f25d63556929 service nova] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Received event network-vif-deleted-c342af3f-5da3-465c-a8da-2b93c20697f7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.686996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.774835] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babba015-f58e-4bc1-a2f8-4ca892401a3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.783477] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba4d760-fe10-461c-b257-bd99b76cec78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.789582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.789752] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquired lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.789933] 
env[68279]: DEBUG nova.network.neutron [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.817830] env[68279]: DEBUG nova.compute.utils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Can not refresh info_cache because instance was not found {{(pid=68279) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 880.820627] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a7da26-0b17-4d31-b599-417b087c0a43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.829720] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27119558-ab80-46cd-8e5f-472a446f42f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.844506] env[68279]: DEBUG nova.compute.provider_tree [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.846649] env[68279]: DEBUG nova.network.neutron [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.863042] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.863042] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Instance network_info: |[{"id": "040cd84c-473e-4fee-b689-54b1128ae340", "address": "fa:16:3e:7f:ca:00", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap040cd84c-47", "ovs_interfaceid": "040cd84c-473e-4fee-b689-54b1128ae340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 880.863042] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Acquired lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.863042] env[68279]: DEBUG nova.network.neutron [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Refreshing network info cache for port 040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.864182] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:ca:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '040cd84c-473e-4fee-b689-54b1128ae340', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.873206] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.876282] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.876781] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9276ea8-26e4-4e14-850d-213426065801 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.899799] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.899799] env[68279]: value = "task-2963201" [ 880.899799] env[68279]: _type = "Task" [ 880.899799] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.908397] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963201, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.928393] env[68279]: DEBUG nova.network.neutron [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.993690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.994151] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.994265] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.994416] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.995055] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.997563] env[68279]: INFO nova.compute.manager [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Terminating instance [ 881.190010] env[68279]: DEBUG nova.network.neutron [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updated VIF entry in instance network info cache for port 040cd84c-473e-4fee-b689-54b1128ae340. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.190377] env[68279]: DEBUG nova.network.neutron [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updating instance_info_cache with network_info: [{"id": "040cd84c-473e-4fee-b689-54b1128ae340", "address": "fa:16:3e:7f:ca:00", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap040cd84c-47", "ovs_interfaceid": "040cd84c-473e-4fee-b689-54b1128ae340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.350205] env[68279]: DEBUG nova.scheduler.client.report [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.410459] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963201, 'name': CreateVM_Task, 'duration_secs': 0.34258} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.410641] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.411361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.411529] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.411910] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 881.412125] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-833509fb-b2eb-48b5-8170-3a63bdab950a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.417083] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 881.417083] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c29a9e-30a8-3657-8ab3-3b08b2789f18" [ 881.417083] env[68279]: _type = "Task" [ 881.417083] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.426057] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c29a9e-30a8-3657-8ab3-3b08b2789f18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.430593] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Releasing lock "refresh_cache-0daf31be-c547-46ae-aa91-f99e191e1c76" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.431025] env[68279]: DEBUG nova.compute.manager [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 881.431243] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.431514] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91c00690-7ef0-46cd-9140-be792b7e357a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.440678] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2bb73c-4522-4daf-a790-8466c28c2b37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.474082] env[68279]: WARNING nova.virt.vmwareapi.vmops [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0daf31be-c547-46ae-aa91-f99e191e1c76 could not be found. [ 881.474323] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 881.474502] env[68279]: INFO nova.compute.manager [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 0.04 seconds to destroy the instance on the hypervisor. [ 881.474751] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.475024] env[68279]: DEBUG nova.compute.manager [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 881.475130] env[68279]: DEBUG nova.network.neutron [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 881.497433] env[68279]: DEBUG nova.network.neutron [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.502669] env[68279]: DEBUG nova.compute.manager [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 881.503721] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.503891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a0eeb9-80a4-456f-a75c-75ea400d4a82 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.512454] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.512730] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-051933bc-9e50-4838-af7c-f98f9b778118 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.519712] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 881.519712] env[68279]: value = "task-2963202" [ 881.519712] env[68279]: _type = "Task" [ 881.519712] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.528338] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963202, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.693778] env[68279]: DEBUG oslo_concurrency.lockutils [req-5e6d538d-8612-466f-ba0a-798d2ef991c6 req-d6ed5a68-30c2-4520-a87f-6317869fa691 service nova] Releasing lock "refresh_cache-7f54f9a6-3236-44c1-b327-1941dbfa3ff0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.855829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.111s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.857985] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.271s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.857985] env[68279]: DEBUG nova.objects.instance [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lazy-loading 'resources' on Instance uuid 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.879374] env[68279]: INFO nova.scheduler.client.report [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocations for instance 4021edd3-346e-44e5-9419-38181cc91c6a [ 881.929312] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c29a9e-30a8-3657-8ab3-3b08b2789f18, 'name': SearchDatastore_Task, 'duration_secs': 0.017069} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.930573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.931218] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.931572] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.931844] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.932160] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.932813] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10e8f85a-cb46-476f-a395-4bfaf1ee2fa0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.947020] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.947020] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.947020] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4b21c98-9a2b-4345-b14c-256fa9ed228f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.952804] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 881.952804] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e3629-2922-c98c-97ac-6be299215974" [ 881.952804] env[68279]: _type = "Task" [ 881.952804] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.961253] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e3629-2922-c98c-97ac-6be299215974, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.001053] env[68279]: DEBUG nova.network.neutron [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.030836] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963202, 'name': PowerOffVM_Task, 'duration_secs': 0.223233} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.031157] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.031373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 882.031640] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26b99eb2-26fe-4673-b302-79fb71c1f22a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.101136] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 882.101275] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 882.101495] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleting the datastore file [datastore1] b2e272b3-520a-4ef7-8141-a9d55739d6b9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.101728] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5eae518-d891-46b3-a8d8-fe1811c5e4d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.109593] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for the task: (returnval){ [ 882.109593] env[68279]: value = "task-2963204" [ 882.109593] env[68279]: _type = "Task" [ 882.109593] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.119137] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.391179] env[68279]: DEBUG oslo_concurrency.lockutils [None req-904fc538-d1e9-424b-8396-c43cd4628ccb tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "4021edd3-346e-44e5-9419-38181cc91c6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.038s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.469521] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e3629-2922-c98c-97ac-6be299215974, 'name': SearchDatastore_Task, 'duration_secs': 0.017245} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.470675] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b32f53e-5e45-41d0-b0d6-0532f3d9bcbb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.480274] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 882.480274] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52692214-e935-9887-4efe-f6df7186fb8a" [ 882.480274] env[68279]: _type = "Task" [ 882.480274] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.493045] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52692214-e935-9887-4efe-f6df7186fb8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.505868] env[68279]: INFO nova.compute.manager [-] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Took 1.03 seconds to deallocate network for instance. [ 882.622229] env[68279]: DEBUG oslo_vmware.api [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Task: {'id': task-2963204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34535} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.624964] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.625185] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.625390] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.625608] env[68279]: INFO nova.compute.manager [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 882.625888] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.626335] env[68279]: DEBUG nova.compute.manager [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 882.627305] env[68279]: DEBUG nova.network.neutron [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 882.822018] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 882.822018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ec35d1-996f-4be4-9eb3-b84f1b04d9e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.830474] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk is in state: ready. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 882.832057] env[68279]: ERROR oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk due to incomplete transfer. [ 882.832057] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cdc7413f-2c43-400d-980b-6772fd863744 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.839647] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d874a6-fb56-ddd4-dd94-ea09a8c8d748/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 882.839857] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Uploaded image f3e10b9e-959d-4ce6-b0eb-9b57ccdf559b to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 882.846018] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 882.846018] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a9c34f8a-5b1f-45d1-9cb9-cdb9d547b79e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.852596] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 882.852596] env[68279]: value = "task-2963205" [ 882.852596] env[68279]: _type = "Task" [ 882.852596] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.872306] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963205, 'name': Destroy_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.914020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad420468-aec2-40af-8a8c-32038127a393 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.923432] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefa691f-4216-43cf-8ed0-b8abe9552d61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.953977] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b70878-dd9c-4763-b118-a17fc617fb6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.964029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae18968-6016-418d-b23d-280b1b3938e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.983081] env[68279]: DEBUG nova.compute.provider_tree [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.998224] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52692214-e935-9887-4efe-f6df7186fb8a, 'name': SearchDatastore_Task, 'duration_secs': 0.01435} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.998624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.999018] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7f54f9a6-3236-44c1-b327-1941dbfa3ff0/7f54f9a6-3236-44c1-b327-1941dbfa3ff0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.999648] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f74fdd0a-b85d-466e-89cc-1a4d8691e490 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.008778] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 883.008778] env[68279]: value = "task-2963206" [ 883.008778] env[68279]: _type = "Task" [ 883.008778] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.013876] env[68279]: INFO nova.compute.manager [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance disappeared during terminate [ 883.013876] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97ff4dc0-5a7d-42c0-9ea1-dc77ba812d46 tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "0daf31be-c547-46ae-aa91-f99e191e1c76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.737s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.018373] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963206, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.025540] env[68279]: DEBUG nova.compute.manager [req-74225ea5-cc0b-4714-87ad-b797ce619b91 req-1478feb6-4eee-42ef-a8e9-de0c39f4ff99 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Received event network-vif-deleted-ad06c5c0-cc93-4b02-968c-9e81681ae50a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.025740] env[68279]: INFO nova.compute.manager [req-74225ea5-cc0b-4714-87ad-b797ce619b91 req-1478feb6-4eee-42ef-a8e9-de0c39f4ff99 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Neutron deleted interface ad06c5c0-cc93-4b02-968c-9e81681ae50a; detaching it from the instance and deleting it from the info cache [ 883.025909] env[68279]: DEBUG nova.network.neutron [req-74225ea5-cc0b-4714-87ad-b797ce619b91 req-1478feb6-4eee-42ef-a8e9-de0c39f4ff99 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.366886] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963205, 'name': Destroy_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.471488] env[68279]: DEBUG nova.network.neutron [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.491249] env[68279]: DEBUG nova.scheduler.client.report [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.518844] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963206, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.528623] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bbbe762-eae4-4586-8c08-92ecc0b16816 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.538716] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d932c1-1228-4e4a-93b1-12ef649ab3b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.578139] env[68279]: DEBUG nova.compute.manager [req-74225ea5-cc0b-4714-87ad-b797ce619b91 req-1478feb6-4eee-42ef-a8e9-de0c39f4ff99 service nova] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Detach interface failed, port_id=ad06c5c0-cc93-4b02-968c-9e81681ae50a, reason: Instance b2e272b3-520a-4ef7-8141-a9d55739d6b9 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 883.871905] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963205, 'name': Destroy_Task, 'duration_secs': 0.552896} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.871905] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Destroyed the VM [ 883.871905] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 883.872250] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3538ca08-2bce-4ba4-8b82-33595b2817cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.878984] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 883.878984] env[68279]: value = "task-2963207" [ 883.878984] env[68279]: _type = "Task" [ 883.878984] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.596793] env[68279]: INFO nova.compute.manager [-] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Took 1.97 seconds to deallocate network for instance. 
[ 884.600022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.740s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.600373] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963207, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.600490] env[68279]: WARNING oslo_vmware.common.loopingcall [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] task run outlasted interval by 0.221298 sec [ 884.602239] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 50.639s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.619573] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824707} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.622786] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7f54f9a6-3236-44c1-b327-1941dbfa3ff0/7f54f9a6-3236-44c1-b327-1941dbfa3ff0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.623043] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.623322] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963207, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.623595] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f0052fb-3be5-40a0-8f78-9c2ff1499a35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.633072] env[68279]: INFO nova.scheduler.client.report [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Deleted allocations for instance 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7 [ 884.638018] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 884.638018] env[68279]: value = "task-2963208" [ 884.638018] env[68279]: _type = "Task" [ 884.638018] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.649653] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.123885] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.130285] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963207, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.144965] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f1c5f3dd-9de8-4e24-a26c-04e3730493e7 tempest-VolumesAssistedSnapshotsTest-844457537 tempest-VolumesAssistedSnapshotsTest-844457537-project-member] Lock "6ca13774-f4db-4c9c-9da7-b773ce6cc6e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.253s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.149919] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090925} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.150245] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.151132] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965be445-9649-421a-81d0-1728c2e4c3e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.176379] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 7f54f9a6-3236-44c1-b327-1941dbfa3ff0/7f54f9a6-3236-44c1-b327-1941dbfa3ff0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.177408] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9866c981-54a4-427e-8d78-5890cdf26edc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.201445] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 885.201445] env[68279]: value = "task-2963209" [ 885.201445] env[68279]: _type = "Task" [ 885.201445] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.210652] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.617954] env[68279]: DEBUG oslo_vmware.api [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963207, 'name': RemoveSnapshot_Task, 'duration_secs': 1.495925} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.619518] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 885.619518] env[68279]: INFO nova.compute.manager [None req-c816f414-728b-4d4f-8c23-f3b1ed535d5c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 16.45 seconds to snapshot the instance on the hypervisor. 
[ 885.625490] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Applying migration context for instance 665d932d-1068-4bb2-835c-2184a80753d1 as it has an incoming, in-progress migration 7ecdeeb2-b081-4af5-b57a-62f05991fa32. Migration status is reverting {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 885.627546] env[68279]: INFO nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating resource usage from migration 7ecdeeb2-b081-4af5-b57a-62f05991fa32 [ 885.652966] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 50e08259-7915-49bb-b137-5cc6e9d53c16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.652966] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.652966] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 336b7399-b64e-411f-99bc-ba0d292e371a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.652966] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.652966] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.652966] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 95f0aeaa-75ab-4fd9-b28d-e43703429167 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.653245] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 885.653245] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.653309] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f7db383a-648a-4984-ae25-72bc2ccfe369 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.653429] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.653540] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b2e272b3-520a-4ef7-8141-a9d55739d6b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.653648] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance fe92e176-222c-4c46-a254-1c12e21c68d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.653756] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance cfaee7e2-6929-4d8c-8614-e19e0055f2fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.653868] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance d61b2c4f-942a-4e29-8cac-11bc0750605a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.653976] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c62a0d0e-8869-482a-a687-c628b96d6e22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.654113] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 885.654230] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e3763645-5a78-4929-98a3-108e72071211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.654377] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b45f310f-e614-47db-9f6e-f35dd481137c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.654451] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance daccaa30-1011-4c7d-a668-05f9329ab4d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.654559] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 67466e30-5944-490c-a89b-2d32c59525be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.654666] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Migration 7ecdeeb2-b081-4af5-b57a-62f05991fa32 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 885.656069] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 665d932d-1068-4bb2-835c-2184a80753d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.656069] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 97c3000f-a3d8-45c1-b0a4-12eb2b22b572 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.656069] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.656069] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 777eda1c-ca3f-4db0-b6b9-5901de5781ff is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.656069] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 8aa8c866-4807-4a06-904e-53c149047d65 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 885.656069] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7d15a05a-f827-40a7-b182-5d2b553481c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.656069] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 885.714108] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963209, 'name': ReconfigVM_Task, 'duration_secs': 0.337272} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.714108] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 7f54f9a6-3236-44c1-b327-1941dbfa3ff0/7f54f9a6-3236-44c1-b327-1941dbfa3ff0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.714804] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92261403-b67b-4361-a181-83b21fd7d004 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.724802] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 885.724802] env[68279]: value = "task-2963210" [ 885.724802] env[68279]: _type = "Task" [ 885.724802] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.734398] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963210, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.158430] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 886.239182] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963210, 'name': Rename_Task, 'duration_secs': 0.148608} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.239622] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.240072] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bff7e238-af9b-45f8-bba5-1ae4fab620c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.250616] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 886.250616] env[68279]: value = "task-2963211" [ 886.250616] env[68279]: _type = "Task" [ 886.250616] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.259891] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.662179] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 5cbe4915-5b01-4424-96c8-f3225e512c89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 886.760847] env[68279]: DEBUG oslo_vmware.api [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963211, 'name': PowerOnVM_Task, 'duration_secs': 0.480398} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.761123] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.761342] env[68279]: INFO nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Took 8.50 seconds to spawn the instance on the hypervisor. [ 886.761532] env[68279]: DEBUG nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.762334] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ab7177-31db-43ac-9555-30001ba0713f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.169753] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 866eb440-4fc9-4708-8a3b-b53f2be3f6c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 887.283604] env[68279]: INFO nova.compute.manager [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Took 59.53 seconds to build instance. [ 887.676677] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7e34039c-c51a-4f9c-961c-144f6d8a5130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 887.785518] env[68279]: DEBUG oslo_concurrency.lockutils [None req-430952ba-ec26-4960-97b8-9a2ea0ac42be tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.824s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.182500] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 02f34ac7-9deb-4714-92cb-bb507fde1e74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 888.430993] env[68279]: DEBUG nova.compute.manager [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.431873] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29963918-2d05-491a-a425-ce581b5bd014 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.519521] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.520471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.685539] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 19f693cd-b598-432d-acf5-64da9f640d5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 888.685897] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (40ba16cf-8244-4715-b8c1-975029462ee4): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocations during the task state transition. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 888.949888] env[68279]: INFO nova.compute.manager [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] instance snapshotting [ 888.952867] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c045217-fba8-4e9c-9e87-fd5645895802 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.975436] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a6b695-d22c-4c8b-ab2f-6666a41ea1d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.024505] env[68279]: DEBUG nova.compute.utils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.189799] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 889.488495] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 889.488826] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-017860b8-3c99-4e02-96c5-722a917009b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.497889] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 889.497889] env[68279]: value = "task-2963212" [ 889.497889] env[68279]: _type = "Task" [ 889.497889] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.506722] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963212, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.527932] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.695956] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 65688756-ad94-437f-9a36-bd7e3f7f7a2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 889.697037] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 889.697037] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4096MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 890.010518] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963212, 'name': CreateSnapshot_Task, 'duration_secs': 0.444953} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.010848] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 890.011665] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542a6ce1-19e8-40ca-808b-da3b7cd0e3e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.260217] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb679ec-274e-485e-8029-613539520576 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.270654] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d24bf62-ec94-4fc9-b763-af0acb819f7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.303518] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f583292-233c-4ec6-a0cf-41003843820e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.311954] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9325e4-c41b-4341-8323-b65a3475694d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.326422] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.541045] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 890.541403] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9edf2ee8-1e66-4e0f-ab16-a5e1f4f03abc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.552112] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 890.552112] env[68279]: value = "task-2963213" [ 890.552112] env[68279]: _type = "Task" [ 890.552112] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.562212] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963213, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.628976] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.629109] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.630059] env[68279]: INFO nova.compute.manager [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Attaching volume c221f4d9-7c4e-442f-9ec4-72c250a12223 to /dev/sdb [ 890.669764] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647108a3-037c-49a8-b7cf-0fd8937c8202 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.678241] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944e509d-523f-48d7-9db6-be54ac22744e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.695512] env[68279]: DEBUG nova.virt.block_device [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updating existing volume attachment record: 15f23f06-03a6-47d7-b744-d97db2cad5b0 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 890.829579] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.063546] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963213, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.334535] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 891.334830] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.733s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.335104] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 55.466s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.336719] env[68279]: INFO nova.compute.claims [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.339928] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.340085] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 891.565642] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963213, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.853284] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] There are 36 instances to clean {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 891.853567] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 0daf31be-c547-46ae-aa91-f99e191e1c76] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.064007] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963213, 'name': CloneVM_Task, 'duration_secs': 1.303587} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.064296] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Created linked-clone VM from snapshot [ 892.065042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39f640d-f0cb-44dd-84e5-54f25c8f5cf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.073928] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Uploading image 9d3d35d3-6bed-4531-997a-039e08ff8184 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 892.094964] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 892.094964] env[68279]: value = "vm-594635" [ 892.094964] env[68279]: _type = "VirtualMachine" [ 892.094964] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 892.095238] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7c08dc2d-b37f-4cc0-b2b8-85661d9f1bc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.103889] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease: (returnval){ [ 892.103889] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520255e6-7212-f2ee-144c-d73a880083b5" [ 892.103889] env[68279]: _type = "HttpNfcLease" [ 892.103889] env[68279]: } obtained for exporting VM: (result){ [ 892.103889] env[68279]: value = "vm-594635" [ 892.103889] env[68279]: _type = "VirtualMachine" [ 892.103889] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 892.104184] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the lease: (returnval){ [ 892.104184] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520255e6-7212-f2ee-144c-d73a880083b5" [ 892.104184] env[68279]: _type = "HttpNfcLease" [ 892.104184] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 892.111274] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 892.111274] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520255e6-7212-f2ee-144c-d73a880083b5" [ 892.111274] env[68279]: _type = "HttpNfcLease" [ 892.111274] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 892.359251] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 4021edd3-346e-44e5-9419-38181cc91c6a] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.617811] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 892.617811] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520255e6-7212-f2ee-144c-d73a880083b5" [ 892.617811] env[68279]: _type = "HttpNfcLease" [ 892.617811] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 892.618298] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 892.618298] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520255e6-7212-f2ee-144c-d73a880083b5" [ 892.618298] env[68279]: _type = "HttpNfcLease" [ 892.618298] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 892.618908] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6598e8e4-ab93-4bb5-ae71-655a0129543b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.627114] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 892.627315] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 892.730523] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2ed578ce-525e-4c7b-ae92-c47818666f80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.865036] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f807e45c-76d8-46a6-a30b-011e7b8df6a4] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.892340] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4976428c-c283-4af6-9458-d03ff8718cc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.903552] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20361050-b81f-4cfe-8322-c3b046658bf5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.935894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1302ff-5bcd-4aa9-9e5e-b4794ecd0521 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.944925] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bc6c3f-3a2f-42d9-b7bc-b57ee3838158 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.963204] env[68279]: DEBUG nova.compute.provider_tree [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.369102] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: ed86ef15-1941-40c5-8178-344a7b401b58] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 893.467958] env[68279]: DEBUG nova.scheduler.client.report [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.873192] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 1a604a32-78c1-49cf-bafd-e1dc94c8b3ae] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 893.974350] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.974940] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.977847] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 57.714s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.978274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.980434] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 54.930s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.981941] env[68279]: INFO nova.compute.claims [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.022062] env[68279]: INFO nova.scheduler.client.report [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Deleted allocations for instance 336b7399-b64e-411f-99bc-ba0d292e371a [ 894.378758] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 43f629d6-bdc3-4345-97ec-26ce2c9d7be7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 894.491336] env[68279]: DEBUG nova.compute.utils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 894.494402] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.494637] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.530028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3333f147-2385-4228-84b8-293e28ea7b97 tempest-ServersTestFqdnHostnames-359420418 tempest-ServersTestFqdnHostnames-359420418-project-member] Lock "336b7399-b64e-411f-99bc-ba0d292e371a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 62.900s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.573842] env[68279]: DEBUG nova.policy [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98ee85328964497482886fb16ff6e25f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '182c7f7affa443dba0ce3affd30eed42', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.883148] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 7858163d-8e68-4565-b1e0-ecd2e9be350d] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 894.996697] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 895.261932] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 895.262239] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594637', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'name': 'volume-c221f4d9-7c4e-442f-9ec4-72c250a12223', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f54f9a6-3236-44c1-b327-1941dbfa3ff0', 'attached_at': '', 'detached_at': '', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'serial': 'c221f4d9-7c4e-442f-9ec4-72c250a12223'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 895.263203] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b95d7bc-908d-485b-a0e4-b3d8915b5733 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.292121] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d126ce4f-478d-4efa-b96c-54eefd6ee3f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.326942] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] volume-c221f4d9-7c4e-442f-9ec4-72c250a12223/volume-c221f4d9-7c4e-442f-9ec4-72c250a12223.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.330747] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5e5b33a-697f-416f-bddc-cc94624c94e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.352205] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 895.352205] env[68279]: value = "task-2963219" [ 895.352205] env[68279]: _type = "Task" [ 895.352205] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.365867] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963219, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.385092] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 01a624d3-782d-44cf-8a4e-05a85ac91c64] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 895.417923] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Successfully created port: 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.605471] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559db920-93d7-4dda-b5c6-10dfe60384c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.615814] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166750a6-1637-4525-a41d-be7cc455a1e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.654559] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2366d2-f87d-405b-9b00-2063a9f6bea4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.664158] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86f2c2f-6872-4ef8-9da5-d2315e66fb2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.682086] env[68279]: DEBUG nova.compute.provider_tree [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.866496] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963219, 'name': ReconfigVM_Task, 'duration_secs': 0.48591} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.866787] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfigured VM instance instance-00000041 to attach disk [datastore1] volume-c221f4d9-7c4e-442f-9ec4-72c250a12223/volume-c221f4d9-7c4e-442f-9ec4-72c250a12223.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.871873] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6de8a89-d994-4851-af88-713abd63c81f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.889234] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: d452e3d2-1590-4352-8406-31d85b2921f4] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 895.891319] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 895.891319] env[68279]: value = "task-2963220" [ 895.891319] env[68279]: _type = "Task" [ 895.891319] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.902532] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963220, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.009642] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 896.114865] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.115156] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.115319] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.115498] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.115644] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.115815] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.116070] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.116253] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 896.116426] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.116608] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.116791] env[68279]: DEBUG nova.virt.hardware [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.118013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c761f34e-f158-41ef-9adc-bf886c499c23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.126469] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6af5c9-18cd-4d47-8355-3cf03dabce9a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.187038] env[68279]: DEBUG nova.scheduler.client.report [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.392860] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 05b94aa5-3efc-4790-9d98-c2658b8e8b4b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 896.412389] env[68279]: DEBUG oslo_vmware.api [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963220, 'name': ReconfigVM_Task, 'duration_secs': 0.179117} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.412389] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594637', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'name': 'volume-c221f4d9-7c4e-442f-9ec4-72c250a12223', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f54f9a6-3236-44c1-b327-1941dbfa3ff0', 'attached_at': '', 'detached_at': '', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'serial': 'c221f4d9-7c4e-442f-9ec4-72c250a12223'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 896.691882] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.693080] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 57.192s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.693274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.695400] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 54.714s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.695621] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.697455] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 50.259s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.697540] env[68279]: DEBUG nova.objects.instance [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 896.736395] env[68279]: INFO nova.scheduler.client.report [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Deleted allocations for instance f7db383a-648a-4984-ae25-72bc2ccfe369 [ 896.743546] env[68279]: INFO nova.scheduler.client.report [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted allocations for instance 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a [ 896.899032] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: a332b35f-4f96-4f8f-aa9a-d7fadf9ede53] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.205926] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "feaff872-2677-4d04-bbf0-96a783332c4d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.206365] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "feaff872-2677-4d04-bbf0-96a783332c4d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.244662] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60defbc7-e9a2-4627-9590-8a89bd3b8572 tempest-AttachInterfacesUnderV243Test-1887643830 tempest-AttachInterfacesUnderV243Test-1887643830-project-member] Lock "f7db383a-648a-4984-ae25-72bc2ccfe369" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 61.358s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.252247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e85fb8d8-20e2-4225-a015-c0c7f1d2f063 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 58.962s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.405542] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 33f3fc4a-319b-4dd9-90b5-05ee5483ac7f] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.469834] 
env[68279]: DEBUG nova.objects.instance [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'flavor' on Instance uuid 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.495071] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Successfully updated port: 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.501067] env[68279]: DEBUG nova.compute.manager [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-vif-plugged-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.501067] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.501067] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.501067] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.501067] env[68279]: DEBUG nova.compute.manager [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] No waiting events found dispatching network-vif-plugged-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.501067] env[68279]: WARNING nova.compute.manager [req-5eff50d4-adcc-4e69-abe0-7824e52f36d5 req-ba93d94b-7c60-4ba3-9e50-44db1a751b76 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received unexpected event network-vif-plugged-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 for instance with vm_state building and task_state spawning. 
[ 897.720017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "feaff872-2677-4d04-bbf0-96a783332c4d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.513s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.724022] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 897.725137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-784d2411-aef9-4d78-a727-587b1380b368 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.726640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.247s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.728219] env[68279]: INFO nova.compute.claims [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.910210] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 4e157792-f910-492c-ab29-dd3f86cb96a8] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.980811] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9833c522-f54f-4d38-ba34-a4780978b82f tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.351s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.001548] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.001548] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
898.001548] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.228158] env[68279]: DEBUG nova.compute.utils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 898.229672] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 898.229848] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 898.339218] env[68279]: DEBUG nova.policy [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c3c915f5b1844299ffbd5aa520cded6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6653e6008806494090ed458c550c3eca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 898.387183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.387460] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.387674] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.387855] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.388119] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.390271] env[68279]: INFO nova.compute.manager [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Terminating instance [ 898.414205] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e5565f0d-ed60-4ac8-bba1-ab46b337dd90] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.585178] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "192734ca-f549-4461-a05a-5f00f0639977" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.585178] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.712808] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.733216] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 898.894861] env[68279]: DEBUG nova.compute.manager [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.895570] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.895570] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ded34cea-4d30-4fe0-8905-e5c689c16894 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.908482] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 898.908482] env[68279]: value = "task-2963221" [ 898.908482] env[68279]: _type = "Task" [ 898.908482] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.917676] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: d8eca7ac-744e-469c-9a87-901f0641f4f2] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.923097] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.953278] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Successfully created port: 8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.989513] env[68279]: DEBUG nova.network.neutron [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.088602] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.395019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bb57eb-7786-47f0-a498-81ae9ea27e02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.403831] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79853d0e-43f5-4505-874a-97ed0419fa6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.438730] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 1d16a5c5-981b-474e-8159-820ac6fcc42d] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 899.447233] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18062e62-418b-4470-acb1-53bfe22a00f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.456248] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963221, 'name': PowerOffVM_Task, 'duration_secs': 0.257652} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.458538] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.458764] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 899.458957] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594637', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'name': 'volume-c221f4d9-7c4e-442f-9ec4-72c250a12223', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f54f9a6-3236-44c1-b327-1941dbfa3ff0', 'attached_at': '', 'detached_at': '', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'serial': 'c221f4d9-7c4e-442f-9ec4-72c250a12223'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 899.459888] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f8b248-a6d9-4df8-8a9b-2799a2791f52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.463443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c78d81-5182-4447-a8ea-7c005fb8511a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.498962] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48483136-9e65-4ee4-9df5-e5df4af4d9e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.503174] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.503174] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Instance network_info: |[{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.503312] env[68279]: DEBUG nova.compute.provider_tree [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.505135] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:95:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '971e9f68-2eb2-418a-92ac-ab9f6e6b6859', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.512560] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.514206] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.514316] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa304d04-ff64-4baf-ac7c-871626989f9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.542031] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bf1f36-08f6-42f3-97cb-b8d48acaa7ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.546686] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.546686] env[68279]: value = "task-2963222" [ 899.546686] env[68279]: _type = "Task" [ 899.546686] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.568793] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44a90dc-3867-40fa-9f72-08488a8ea842 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.575307] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963222, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.594024] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] The volume has not been displaced from its original location: [datastore1] volume-c221f4d9-7c4e-442f-9ec4-72c250a12223/volume-c221f4d9-7c4e-442f-9ec4-72c250a12223.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 899.599690] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfiguring VM instance instance-00000041 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 899.605158] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c91126bd-1854-45d5-9c6b-9800d02072e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.630786] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 899.630786] env[68279]: value = "task-2963223" [ 899.630786] env[68279]: _type = "Task" [ 899.630786] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.641804] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.643301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.653189] env[68279]: DEBUG nova.compute.manager [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.653598] env[68279]: DEBUG nova.compute.manager [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing instance network info cache due to event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 899.654010] env[68279]: DEBUG oslo_concurrency.lockutils [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.654292] env[68279]: DEBUG oslo_concurrency.lockutils [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.654603] env[68279]: DEBUG nova.network.neutron [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.746981] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 899.776680] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 899.776888] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.777073] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 899.777278] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.777469] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d 
tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 899.777594] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 899.777827] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 899.778026] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 899.778184] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 899.778351] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 899.778520] env[68279]: DEBUG nova.virt.hardware [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 899.779445] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76879370-bb7f-4e5d-86a7-5296ef0f138c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.791126] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0158a8aa-0c40-4079-93bb-63457e385ebc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.946581] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f927c34a-f155-4a1f-8151-b16a3cb3e9a1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.015488] env[68279]: DEBUG nova.scheduler.client.report [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.060711] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963222, 'name': CreateVM_Task, 'duration_secs': 0.485882} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.061336] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.062147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.062319] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.062719] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.062990] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d032645-a887-499f-abbd-87026b339e49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.069327] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 900.069327] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5201bde1-636d-ea3e-d0ce-5fa75be0cf09" [ 900.069327] env[68279]: _type = "Task" [ 900.069327] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.079353] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5201bde1-636d-ea3e-d0ce-5fa75be0cf09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.141226] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963223, 'name': ReconfigVM_Task, 'duration_secs': 0.226539} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.141723] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Reconfigured VM instance instance-00000041 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 900.146520] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df9d6b74-b896-42f3-a6bf-ebccec9879f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.165030] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 900.165030] env[68279]: value = "task-2963224" [ 900.165030] env[68279]: _type = "Task" [ 900.165030] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.176876] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.376210] env[68279]: DEBUG nova.network.neutron [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updated VIF entry in instance network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.376589] env[68279]: DEBUG nova.network.neutron [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.450319] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 61392426-52b8-437e-ab3d-122d9335cd36] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.523059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.523059] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 900.525269] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.459s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.525523] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.527715] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.113s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.527848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.530791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 48.201s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.560419] env[68279]: INFO nova.scheduler.client.report [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted allocations for instance 95f0aeaa-75ab-4fd9-b28d-e43703429167 [ 900.562543] env[68279]: INFO nova.scheduler.client.report [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Deleted allocations for instance b45f310f-e614-47db-9f6e-f35dd481137c [ 900.589656] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5201bde1-636d-ea3e-d0ce-5fa75be0cf09, 'name': SearchDatastore_Task, 'duration_secs': 0.014456} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.589656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.589656] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.589656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.589656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.589656] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.589656] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44551513-4568-43ff-8d81-64e792ee00f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.601043] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.601043] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.602544] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f57471c-8303-41c4-bbe0-35afb3d356a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.611696] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 900.611696] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528eaa3c-be77-f23c-3e9a-1165f8b84d10" [ 900.611696] env[68279]: _type = "Task" [ 900.611696] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.623472] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528eaa3c-be77-f23c-3e9a-1165f8b84d10, 'name': SearchDatastore_Task, 'duration_secs': 0.010584} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.624168] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6193972d-6962-4492-adf9-7b7d5adfac62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.631167] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 900.631167] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa7a68-2c28-2d07-1a99-3e531bc19e21" [ 900.631167] env[68279]: _type = "Task" [ 900.631167] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.641615] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa7a68-2c28-2d07-1a99-3e531bc19e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.670451] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 900.671408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62339006-4985-4445-a21c-5705f26e2f99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.681476] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 900.681667] env[68279]: ERROR oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk due to incomplete transfer. [ 900.685420] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f3ce1f4d-622a-42d7-831a-6d8cee7c3a4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.688175] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963224, 'name': ReconfigVM_Task, 'duration_secs': 0.347268} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.689219] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594637', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'name': 'volume-c221f4d9-7c4e-442f-9ec4-72c250a12223', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f54f9a6-3236-44c1-b327-1941dbfa3ff0', 'attached_at': '', 'detached_at': '', 'volume_id': 'c221f4d9-7c4e-442f-9ec4-72c250a12223', 'serial': 'c221f4d9-7c4e-442f-9ec4-72c250a12223'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 900.689517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.690826] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca06ea4-ebb4-4b50-94da-2aa21ebf969c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.697961] env[68279]: DEBUG oslo_vmware.rw_handles [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Closed VMDK read handle for 
https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521c4059-6436-4a18-5de8-02522cd1a11d/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 900.698210] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Uploaded image 9d3d35d3-6bed-4531-997a-039e08ff8184 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 900.701694] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 900.704419] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-27a48038-024e-416e-a78a-b8e3f6140d77 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.707424] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.707943] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f817d7f5-e397-4c18-bad2-7bbd38f03ceb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.717812] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 900.717812] env[68279]: value = "task-2963226" [ 900.717812] env[68279]: _type = "Task" [ 900.717812] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.730897] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963226, 'name': Destroy_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.808282] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.808282] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.808282] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore2] 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.808828] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17ca3e4c-a615-4c9e-9b66-9700c9689e0e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.816927] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 900.816927] env[68279]: value = "task-2963227" [ 900.816927] env[68279]: _type = "Task" [ 900.816927] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.827929] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963227, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.880077] env[68279]: DEBUG oslo_concurrency.lockutils [req-ec86cca4-a714-44ac-80ec-cadb388a8868 req-2ba7915b-03aa-4b0a-9dca-5c70e49ffb4a service nova] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.953831] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 6e947ed2-a6aa-42d4-b97e-31db33f6d5f9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.979980] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Successfully updated port: 8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.072963] env[68279]: DEBUG nova.objects.instance [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lazy-loading 'migration_context' on Instance uuid 665d932d-1068-4bb2-835c-2184a80753d1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.075587] env[68279]: DEBUG nova.compute.utils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.080248] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.080990] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.086054] env[68279]: DEBUG oslo_concurrency.lockutils [None req-11437adf-32c5-4fa1-8de9-cae28278dfc2 tempest-ServersListShow2100Test-1428248124 tempest-ServersListShow2100Test-1428248124-project-member] Lock "b45f310f-e614-47db-9f6e-f35dd481137c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.913s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.088069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9eef11e4-ce91-40eb-9393-cfe3c550f4ab tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "95f0aeaa-75ab-4fd9-b28d-e43703429167" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.442s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.143036] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa7a68-2c28-2d07-1a99-3e531bc19e21, 'name': SearchDatastore_Task, 'duration_secs': 0.011003} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.143323] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.143586] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.143844] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8f10abd-abc4-4dd0-bf4c-dfabef8f5d31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.151538] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 901.151538] env[68279]: value = "task-2963228" [ 901.151538] env[68279]: _type = "Task" [ 901.151538] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.160641] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.177037] env[68279]: DEBUG nova.policy [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655eae57bb1349c0a229c3b57f4d3446', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f947b60992d543c4b0bfee2553bfe357', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.231024] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963226, 'name': Destroy_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.328018] env[68279]: DEBUG oslo_vmware.api [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963227, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185996} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.328260] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.328460] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.328611] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.328789] env[68279]: INFO nova.compute.manager [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Took 2.43 seconds to destroy the instance on the hypervisor. 
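The PowerOffVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo_vmware calling pattern: the driver invokes an asynchronous vSphere *_Task method through the API session, gets a Task reference back immediately, and then blocks in wait_for_task, which polls the task until vCenter reports success or failure (the repeated "_poll_task ... progress is N%" entries). The sketch below illustrates that pattern with the public oslo_vmware API; it is not the driver's own code, and the hostname, credentials and vm_ref argument are placeholders, not values taken from this log.

    from oslo_vmware import api


    def power_off_and_wait(session, vm_ref):
        """Power off a VM via the vSphere API and block until the task finishes.

        session is an oslo_vmware.api.VMwareAPISession; vm_ref is a
        VirtualMachine managed-object reference obtained elsewhere (e.g. from a
        property-collector query, not shown here).
        """
        # Asynchronous call: vCenter returns a Task reference immediately
        # (the "Invoking VirtualMachine.PowerOffVM_Task" entries above).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task state (the "_poll_task ... progress is N%"
        # entries) and returns the task info on success, raising an oslo_vmware
        # exception if vCenter reports the task as failed.
        return session.wait_for_task(task)


    # Placeholder connection values; in a real deployment these come from the
    # [vmware] section of nova.conf, not from this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

Each invocation also carries an opID (the opID=oslo.vmware-... values in the request_handler entries), which oslo_vmware generates per request so the operation on the vCenter side can be correlated with these client-side log entries.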
[ 901.329082] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.329299] env[68279]: DEBUG nova.compute.manager [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.329399] env[68279]: DEBUG nova.network.neutron [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.458217] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: bf4e6484-d17d-4244-9163-1ef0012874b8] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 901.485297] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.485453] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquired lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.485662] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.587245] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 901.664357] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477688} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.668029] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.668267] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.668728] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce676f6d-0b37-492a-813b-66cb5d2d62e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.677047] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 901.677047] env[68279]: value = "task-2963229" [ 901.677047] env[68279]: _type = "Task" [ 901.677047] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.691313] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963229, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.733547] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963226, 'name': Destroy_Task, 'duration_secs': 0.555071} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.733547] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Destroyed the VM [ 901.733547] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 901.733547] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c14c0e4b-c61d-4a83-8900-42db2130a1e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.743077] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 901.743077] env[68279]: value = "task-2963230" [ 901.743077] env[68279]: _type = "Task" [ 901.743077] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.763932] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963230, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.877631] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Successfully created port: 58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.961916] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 92786813-f4ab-4ff7-8597-aa1aa90eeb01] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.071017] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.090938] env[68279]: DEBUG nova.compute.manager [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Received event network-vif-plugged-8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.091539] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Acquiring lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.091539] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.091671] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.091794] env[68279]: DEBUG nova.compute.manager [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] No waiting events found dispatching network-vif-plugged-8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 902.092078] env[68279]: WARNING nova.compute.manager [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Received unexpected event network-vif-plugged-8afeda32-cf4a-4c25-8095-c1df322b3c5a for instance with vm_state building and task_state spawning. [ 902.092159] env[68279]: DEBUG nova.compute.manager [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Received event network-changed-8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.092310] env[68279]: DEBUG nova.compute.manager [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Refreshing instance network info cache due to event network-changed-8afeda32-cf4a-4c25-8095-c1df322b3c5a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 902.092488] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Acquiring lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.189855] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072155} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.190721] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.191506] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892da792-7053-4c5f-aa25-b57029b2f35b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.194481] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3cefdd-6e8d-4793-841b-6cad37d88c07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.200031] env[68279]: DEBUG nova.compute.manager [req-43101d81-e399-4484-bdc9-9547b8bd0e01 req-2c5d353b-492a-4b22-b0bf-c4e13e2b395d service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Received event network-vif-deleted-040cd84c-473e-4fee-b689-54b1128ae340 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.200301] env[68279]: INFO nova.compute.manager [req-43101d81-e399-4484-bdc9-9547b8bd0e01 req-2c5d353b-492a-4b22-b0bf-c4e13e2b395d service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Neutron deleted interface 040cd84c-473e-4fee-b689-54b1128ae340; detaching it from the instance and deleting it from the info cache [ 902.200482] env[68279]: DEBUG nova.network.neutron [req-43101d81-e399-4484-bdc9-9547b8bd0e01 req-2c5d353b-492a-4b22-b0bf-c4e13e2b395d service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.223204] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.224743] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12b55f5-ff60-4b69-a5ff-dd4cd1e4f417 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.229461] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e8de652-ac29-4963-aab9-f29a50ea61c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.244463] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7011d6f8-9053-4d9c-814e-f3407a2ae9fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.289675] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ddacf6-93f4-4605-8444-34495107a6da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.292062] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 902.292062] env[68279]: value = "task-2963231" [ 902.292062] env[68279]: _type = "Task" [ 902.292062] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.292349] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963230, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.295070] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99602765-0a98-48a9-9cb1-79973802618e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.314343] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38d410a-f4b0-4c6a-ba02-2fcc64b2b181 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.322267] env[68279]: DEBUG nova.network.neutron [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.323477] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963231, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.334686] env[68279]: DEBUG nova.compute.provider_tree [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.352305] env[68279]: DEBUG nova.compute.manager [req-43101d81-e399-4484-bdc9-9547b8bd0e01 req-2c5d353b-492a-4b22-b0bf-c4e13e2b395d service nova] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Detach interface failed, port_id=040cd84c-473e-4fee-b689-54b1128ae340, reason: Instance 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 902.353527] env[68279]: DEBUG nova.scheduler.client.report [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.467775] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 11c439ab-e27c-43e6-b752-c90af5f84bc1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.603207] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 902.636919] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.637179] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.637461] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.637532] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.637658] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.637805] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.638014] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.638194] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 902.638362] env[68279]: DEBUG 
nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.638532] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.638690] env[68279]: DEBUG nova.virt.hardware [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.639603] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b409d337-432b-4592-ad28-0650e14d68ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.642977] env[68279]: DEBUG nova.network.neutron [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Updating instance_info_cache with network_info: [{"id": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "address": "fa:16:3e:29:d7:84", "network": {"id": "d444342e-a281-4616-bfc3-69176ef7a310", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1202175531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6653e6008806494090ed458c550c3eca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8afeda32-cf", "ovs_interfaceid": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.651602] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fba41b6-c790-43f6-9251-e5f73cac9849 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.669319] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
902.669560] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.669759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.669940] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.670303] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.672069] env[68279]: INFO nova.compute.manager [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Terminating instance [ 902.757157] env[68279]: DEBUG oslo_vmware.api [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963230, 'name': RemoveSnapshot_Task, 'duration_secs': 0.567691} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.757413] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 902.757635] env[68279]: INFO nova.compute.manager [None req-368deca1-6fba-4522-9d48-daa56644536e tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Took 13.81 seconds to snapshot the instance on the hypervisor. [ 902.819394] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963231, 'name': ReconfigVM_Task, 'duration_secs': 0.263558} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.819394] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfigured VM instance instance-00000042 to attach disk [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.819394] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25b7a47e-1ca4-4d03-9a26-325f9c4b487f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.824972] env[68279]: INFO nova.compute.manager [-] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Took 1.50 seconds to deallocate network for instance. [ 902.832826] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 902.832826] env[68279]: value = "task-2963232" [ 902.832826] env[68279]: _type = "Task" [ 902.832826] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.842211] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963232, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.975048] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: deea2dea-1860-45a0-9637-ced09bb51b81] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.146049] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Releasing lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.146401] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Instance network_info: |[{"id": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "address": "fa:16:3e:29:d7:84", "network": {"id": "d444342e-a281-4616-bfc3-69176ef7a310", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1202175531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6653e6008806494090ed458c550c3eca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8afeda32-cf", "ovs_interfaceid": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 903.146738] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Acquired lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.146956] env[68279]: DEBUG nova.network.neutron [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Refreshing network info cache for port 8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.148159] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:d7:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8afeda32-cf4a-4c25-8095-c1df322b3c5a', 'vif_model': 'vmxnet3'}] {{(pid=68279) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.155647] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Creating folder: Project (6653e6008806494090ed458c550c3eca). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.156479] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-266284eb-b175-4dca-b583-82970c71bac1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.173440] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Created folder: Project (6653e6008806494090ed458c550c3eca) in parent group-v594445. [ 903.173633] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Creating folder: Instances. Parent ref: group-v594639. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.173864] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b155295b-84c5-4025-b394-33550c64a12b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.175947] env[68279]: DEBUG nova.compute.manager [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.176259] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.176966] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f63fd7-7ca6-4038-b743-580d31c6f913 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.185632] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.185954] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8ff4fdc-1c2b-43fa-b190-8e3194370703 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.192765] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 903.192765] env[68279]: value = "task-2963235" [ 903.192765] env[68279]: _type = "Task" [ 903.192765] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.194059] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Created folder: Instances in parent group-v594639. [ 903.198022] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.198022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 903.198022] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83a11bb5-4f50-4c2a-92fc-5f49a6731ae7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.219098] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.219599] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.219599] env[68279]: value = "task-2963236" [ 903.219599] env[68279]: _type = "Task" [ 903.219599] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.230290] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963236, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.351150] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963232, 'name': Rename_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.366672] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.836s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.375313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.967s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.376948] env[68279]: INFO nova.compute.claims [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.407284] env[68279]: INFO nova.compute.manager [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Took 0.58 seconds to detach 1 volumes for instance. [ 903.479384] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 9d4b56df-11d9-4d94-94f3-6c5e27ea85f4] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.707286] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.729791] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963236, 'name': CreateVM_Task, 'duration_secs': 0.372071} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.730056] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 903.730708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.730878] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.731235] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 903.731502] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b14c3ab-8bd8-4e46-904d-376ae0adbd02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.736793] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 903.736793] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ad5850-c9d5-b92e-3bab-3f57dab9a641" [ 903.736793] env[68279]: _type = "Task" [ 903.736793] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.745725] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ad5850-c9d5-b92e-3bab-3f57dab9a641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.856947] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963232, 'name': Rename_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.912454] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Successfully updated port: 58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.923939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.943139] env[68279]: DEBUG nova.network.neutron [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Updated VIF entry in instance network info cache for port 8afeda32-cf4a-4c25-8095-c1df322b3c5a. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.943544] env[68279]: DEBUG nova.network.neutron [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Updating instance_info_cache with network_info: [{"id": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "address": "fa:16:3e:29:d7:84", "network": {"id": "d444342e-a281-4616-bfc3-69176ef7a310", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1202175531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6653e6008806494090ed458c550c3eca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8afeda32-cf", "ovs_interfaceid": "8afeda32-cf4a-4c25-8095-c1df322b3c5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.982889] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 010e5bfc-814c-4bde-8a16-7c2009ee13b6] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.205856] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963235, 'name': PowerOffVM_Task, 'duration_secs': 0.962656} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.206277] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.206456] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 904.206711] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dd46c2e-38ae-4960-ac14-cbafb470fd76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.248531] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ad5850-c9d5-b92e-3bab-3f57dab9a641, 'name': SearchDatastore_Task, 'duration_secs': 0.010617} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.248966] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.249309] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 904.249578] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.250137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.250376] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
904.250729] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f1b5637-f602-46e4-a91f-76eb8e640401 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.262256] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 904.262437] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 904.263678] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adb85ea3-fcfa-49c0-ac90-943f7521976f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.270544] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 904.270760] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 904.270965] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore2] 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.271700] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-487f40fc-82d0-43c9-9887-d1cdce75a259 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.274285] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 904.274285] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5254dace-ff9f-efa2-aa6b-8360ca5c77c5" [ 904.274285] env[68279]: _type = "Task" [ 904.274285] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.279928] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 904.279928] env[68279]: value = "task-2963238" [ 904.279928] env[68279]: _type = "Task" [ 904.279928] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.287183] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5254dace-ff9f-efa2-aa6b-8360ca5c77c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.293031] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963238, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.346025] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963232, 'name': Rename_Task, 'duration_secs': 1.174073} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.346368] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.346627] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21bfd179-07cd-4ef0-bd9d-1e46b868c0a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.354509] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 904.354509] env[68279]: value = "task-2963239" [ 904.354509] env[68279]: _type = "Task" [ 904.354509] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.363936] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963239, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.415681] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.415894] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.416108] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.446722] env[68279]: DEBUG oslo_concurrency.lockutils [req-c4a04f65-9d7c-4189-afd6-e2ee75164abd req-56baa1fa-a95c-4316-bdc4-8bae8aebbd5b service nova] Releasing lock "refresh_cache-5cbe4915-5b01-4424-96c8-f3225e512c89" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.489225] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: b40956fc-66f5-4bb6-8763-22465bb221bf] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.545615] env[68279]: DEBUG nova.compute.manager [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Received event network-vif-plugged-58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.545615] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.545615] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.545615] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.546032] env[68279]: DEBUG nova.compute.manager [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 
req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] No waiting events found dispatching network-vif-plugged-58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.549894] env[68279]: WARNING nova.compute.manager [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Received unexpected event network-vif-plugged-58be91e3-be6b-4118-8032-e40c6f5f099d for instance with vm_state building and task_state spawning. [ 904.549894] env[68279]: DEBUG nova.compute.manager [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Received event network-changed-58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.549894] env[68279]: DEBUG nova.compute.manager [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Refreshing instance network info cache due to event network-changed-58be91e3-be6b-4118-8032-e40c6f5f099d. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 904.550178] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Acquiring lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.632050] env[68279]: DEBUG nova.compute.manager [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.632854] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a26429-2dbf-4239-baab-71780924c508 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.788409] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5254dace-ff9f-efa2-aa6b-8360ca5c77c5, 'name': SearchDatastore_Task, 'duration_secs': 0.012016} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.792291] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb13426-8122-4493-a90b-54e1ed2ac39b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.798292] env[68279]: DEBUG oslo_vmware.api [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149095} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.798987] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.799719] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.800031] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.800155] env[68279]: INFO nova.compute.manager [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Took 1.62 seconds to destroy the instance on the hypervisor. [ 904.800430] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.800884] env[68279]: DEBUG nova.compute.manager [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.800961] env[68279]: DEBUG nova.network.neutron [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.807484] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 904.807484] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f1a077-9718-95c1-41e3-9fedb832e5f7" [ 904.807484] env[68279]: _type = "Task" [ 904.807484] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.826103] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f1a077-9718-95c1-41e3-9fedb832e5f7, 'name': SearchDatastore_Task, 'duration_secs': 0.01304} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.826268] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.826791] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89/5cbe4915-5b01-4424-96c8-f3225e512c89.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.826791] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7d65c6c-c90c-4f5d-a064-e13768af22b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.835583] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 904.835583] env[68279]: value = "task-2963240" [ 904.835583] env[68279]: _type = "Task" [ 904.835583] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.850504] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.866534] env[68279]: DEBUG oslo_vmware.api [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963239, 'name': PowerOnVM_Task, 'duration_secs': 0.47546} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.866813] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.867145] env[68279]: INFO nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Took 8.86 seconds to spawn the instance on the hypervisor. 
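The recurring "Invoking *_Task", "Waiting for the task: (returnval)" and "Task ... completed successfully" entries above and below are the visible trace of oslo.vmware's request/poll cycle. A minimal sketch of that pattern follows; the function name, vCenter endpoint, credentials and VM reference are assumed placeholders and are not taken from this log.

from oslo_vmware import api as vmware_api


def power_on_and_wait(host, user, password, vm_ref):
    # Placeholder endpoint/credentials; vm_ref is a VirtualMachine
    # managed-object reference obtained elsewhere (also a placeholder).
    session = vmware_api.VMwareAPISession(host, user, password,
                                          api_retry_count=10,
                                          task_poll_interval=0.5)
    # invoke_api() sends the SOAP request; this is what the
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." entries record.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state (the "_poll_task ... progress
    # is N%" entries) and returns the task info once vCenter reports the
    # task completed successfully.
    return session.wait_for_task(task)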
[ 904.867261] env[68279]: DEBUG nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.868059] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c042777c-531d-4e76-88e4-8414d92d5776 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.920455] env[68279]: INFO nova.compute.manager [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Swapping old allocation on dict_keys(['40ba16cf-8244-4715-b8c1-975029462ee4']) held by migration 7ecdeeb2-b081-4af5-b57a-62f05991fa32 for instance [ 904.952061] env[68279]: DEBUG nova.scheduler.client.report [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Overwriting current allocation {'allocations': {'40ba16cf-8244-4715-b8c1-975029462ee4': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 98}}, 'project_id': '36fa09849bed42f69be37a023b710523', 'user_id': '88074da303124c9db173ac0c253f5c27', 'consumer_generation': 1} on consumer 665d932d-1068-4bb2-835c-2184a80753d1 {{(pid=68279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 904.960713] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee20aa0-76a8-4c31-b67b-8e8eb105b0fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.970071] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b531677-568b-4692-84cf-7196e297040e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.974248] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.010678] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "2cdd785d-6758-469f-b1f6-266154853f8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.010977] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.012318] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: c9bda338-6c7d-4850-8f46-7cd916372ac9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.023417] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11222f9c-b1ce-4308-b8d2-789f503b483e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.035033] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060515ab-0030-4a46-94b1-28d265f29744 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.053916] env[68279]: DEBUG nova.compute.provider_tree [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.068524] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.068771] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquired lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.069100] env[68279]: DEBUG nova.network.neutron [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.151688] env[68279]: INFO nova.compute.manager [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 
97c3000f-a3d8-45c1-b0a4-12eb2b22b572] instance snapshotting [ 905.157868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1492e9bb-95c4-45fe-b8a3-78c4668be976 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.202211] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3699781-763a-4b02-b601-8e4ba21ba8d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.248226] env[68279]: DEBUG nova.compute.manager [req-7f73baf0-86c1-400c-a36d-d884f47989c7 req-d8c7dbe2-847a-410b-bee2-96c58c57c5cb service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Received event network-vif-deleted-897f9e70-e215-4b51-8dec-f0e2b05f7b12 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.248454] env[68279]: INFO nova.compute.manager [req-7f73baf0-86c1-400c-a36d-d884f47989c7 req-d8c7dbe2-847a-410b-bee2-96c58c57c5cb service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Neutron deleted interface 897f9e70-e215-4b51-8dec-f0e2b05f7b12; detaching it from the instance and deleting it from the info cache [ 905.248630] env[68279]: DEBUG nova.network.neutron [req-7f73baf0-86c1-400c-a36d-d884f47989c7 req-d8c7dbe2-847a-410b-bee2-96c58c57c5cb service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.252734] env[68279]: DEBUG nova.network.neutron [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.346715] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50917} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.347010] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89/5cbe4915-5b01-4424-96c8-f3225e512c89.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.347353] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.347680] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f07598fb-8606-424c-b501-27c024e5781e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.356443] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 905.356443] env[68279]: value = "task-2963241" [ 905.356443] env[68279]: _type = "Task" [ 905.356443] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.367912] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963241, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.393811] env[68279]: INFO nova.compute.manager [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Took 69.54 seconds to build instance. [ 905.515025] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.527074] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 65c3761e-c236-41a9-9adb-d1a6e7a9a7c7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.557750] env[68279]: DEBUG nova.scheduler.client.report [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.666211] env[68279]: DEBUG nova.network.neutron [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.724481] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 905.724776] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3091c420-4818-49a4-b178-53a1467acc8c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.751569] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d35100ad-d6ce-434f-a0f0-e2cf3d5cac2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.755148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.755462] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Instance network_info: |[{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 905.755744] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Acquired lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.755918] env[68279]: DEBUG nova.network.neutron [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Refreshing network info cache for port 58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.757055] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:d1:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58be91e3-be6b-4118-8032-e40c6f5f099d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.765058] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.766616] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.766966] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bb8f124-b706-4143-8e4f-f4981a17b5f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.784456] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8761fc5c-581b-4546-844c-9d817967e35c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.802046] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 905.802046] env[68279]: value = "task-2963242" [ 905.802046] env[68279]: _type = "Task" [ 905.802046] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.807422] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.807422] env[68279]: value = "task-2963243" [ 905.807422] env[68279]: _type = "Task" [ 905.807422] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.815904] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963242, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.832130] env[68279]: DEBUG nova.compute.manager [req-7f73baf0-86c1-400c-a36d-d884f47989c7 req-d8c7dbe2-847a-410b-bee2-96c58c57c5cb service nova] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Detach interface failed, port_id=897f9e70-e215-4b51-8dec-f0e2b05f7b12, reason: Instance 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 905.837433] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963243, 'name': CreateVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.867905] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067847} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.868207] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.869064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ba6e38-1e8a-4e8e-bbbf-7422bb025147 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.892237] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89/5cbe4915-5b01-4424-96c8-f3225e512c89.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.892495] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8aa22f1-fc79-46c6-bdef-8cd466537c02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.907488] env[68279]: DEBUG nova.network.neutron [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with 
network_info: [{"id": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "address": "fa:16:3e:3c:9f:44", "network": {"id": "768d87d4-025b-491c-b5a6-9eaabd54f052", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c7a4facdfe194e1cab42f5a1979bf666", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d0066b-e3", "ovs_interfaceid": "14d0066b-e387-4f2f-a12a-c40206f0b1d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.909387] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a4f3ddaa-551d-4182-b907-3e66615a1a20 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.620s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.916917] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 905.916917] env[68279]: value = "task-2963244" [ 905.916917] env[68279]: _type = "Task" [ 905.916917] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.929032] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963244, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.031195] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 4c99c929-9fda-42f0-9327-0508ad3e6150] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.042405] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.063675] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.064279] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.067591] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.558s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.069159] env[68279]: INFO nova.compute.claims [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.170081] env[68279]: INFO nova.compute.manager [-] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Took 1.37 seconds to deallocate network for instance. [ 906.316678] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963242, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.323335] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963243, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.410876] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Releasing lock "refresh_cache-665d932d-1068-4bb2-835c-2184a80753d1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.411083] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.411420] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4284de9b-fb18-4e6c-898c-fa88cfee9d1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.530939] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963244, 'name': ReconfigVM_Task, 'duration_secs': 0.322372} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.530939] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 906.530939] env[68279]: value = "task-2963245" [ 906.530939] env[68279]: _type = "Task" [ 906.530939] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.532898] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89/5cbe4915-5b01-4424-96c8-f3225e512c89.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.533890] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 6ca13774-f4db-4c9c-9da7-b773ce6cc6e7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.540521] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71937c7c-c9d4-49aa-bd0f-37d62f1dc757 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.557315] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963245, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.558301] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 906.558301] env[68279]: value = "task-2963246" [ 906.558301] env[68279]: _type = "Task" [ 906.558301] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.568716] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963246, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.576789] env[68279]: DEBUG nova.compute.utils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 906.578522] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 906.582023] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 906.627650] env[68279]: DEBUG nova.policy [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 906.676683] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.808856] env[68279]: DEBUG nova.network.neutron [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updated VIF entry in instance network info cache for port 58be91e3-be6b-4118-8032-e40c6f5f099d. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.809500] env[68279]: DEBUG nova.network.neutron [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.818304] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963242, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.824681] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963243, 'name': CreateVM_Task, 'duration_secs': 0.583273} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.825495] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.826322] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.826438] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.826718] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.827665] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2322ab5-937d-474e-a7ef-44f812934118 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.833783] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 906.833783] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52319644-2d25-93ca-015d-d42d02f4f65a" [ 906.833783] env[68279]: _type = "Task" [ 906.833783] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.842933] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52319644-2d25-93ca-015d-d42d02f4f65a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.940322] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Successfully created port: ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.044362] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963245, 'name': PowerOffVM_Task, 'duration_secs': 0.233996} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.046210] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.046210] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:45:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1a90e579-85e0-4e3b-aa60-125e40db2a15',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1064338248',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.046414] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.046490] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.046659] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.046809] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.046958] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.047181] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 907.047347] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 
tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.047512] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.047675] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.048012] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.052922] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: b869231a-5293-433f-ac7c-d50030368826] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.054642] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe6ab94e-f703-4a8a-931c-5c9a4c3f4c8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.076686] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963246, 'name': Rename_Task, 'duration_secs': 0.153738} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.078802] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.082302] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 907.082302] env[68279]: value = "task-2963247" [ 907.082302] env[68279]: _type = "Task" [ 907.082302] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.082715] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c83c4c11-a3ea-4d34-95a2-f8d50e9daf5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.087397] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.097854] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.099894] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 907.099894] env[68279]: value = "task-2963248" [ 907.099894] env[68279]: _type = "Task" [ 907.099894] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.109577] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963248, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.283650] env[68279]: DEBUG nova.compute.manager [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.283852] env[68279]: DEBUG nova.compute.manager [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing instance network info cache due to event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 907.284082] env[68279]: DEBUG oslo_concurrency.lockutils [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.284232] env[68279]: DEBUG oslo_concurrency.lockutils [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.284395] env[68279]: DEBUG nova.network.neutron [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.321149] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ce44f47-5196-4d3e-8766-3dcb663206c3 req-e75ed3d7-a208-40f2-9139-1c2b55a8b1f0 service nova] Releasing lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.321372] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963242, 'name': CreateSnapshot_Task, 'duration_secs': 1.198555} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.321889] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 907.322699] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bba924d-e212-4073-99c7-5d62a1d978b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.352561] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52319644-2d25-93ca-015d-d42d02f4f65a, 'name': SearchDatastore_Task, 'duration_secs': 0.010359} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.352561] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.352561] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.352739] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.352841] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.353019] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.353291] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a74ecd5-a8fd-4e18-9a5e-06142424960c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.363289] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.363480] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.364230] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-867b6d7c-f13f-4a73-87f4-60079ab0f161 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.375329] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 907.375329] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52735f91-be51-cca8-64ea-55b0d8aad319" [ 907.375329] env[68279]: _type = "Task" [ 907.375329] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.389300] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52735f91-be51-cca8-64ea-55b0d8aad319, 'name': SearchDatastore_Task, 'duration_secs': 0.010939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.393104] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1760b876-92e3-4a0a-a24f-cf9b08100bba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.399406] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 907.399406] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8d62-f369-5240-1891-c9cad60a2ffb" [ 907.399406] env[68279]: _type = "Task" [ 907.399406] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.410486] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8d62-f369-5240-1891-c9cad60a2ffb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.566101] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 239d0522-5101-49e0-8d3b-85b54927cd21] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.609410] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963247, 'name': ReconfigVM_Task, 'duration_secs': 0.163825} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.613826] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377b1dad-412b-48ad-b60d-2674a75ab153 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.621628] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963248, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.646581] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:45:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1a90e579-85e0-4e3b-aa60-125e40db2a15',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1064338248',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.646751] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.646873] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.647079] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.647231] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.647382] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.647669] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 907.647851] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.648035] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.648214] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.648418] env[68279]: DEBUG nova.virt.hardware [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.649900] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a2e0a78-ef72-46d1-9e7e-5e2ba903d3c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.659278] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 907.659278] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522a346a-3a6c-a078-9f73-f9b950f94ae6" [ 907.659278] env[68279]: _type = "Task" [ 907.659278] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.668364] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522a346a-3a6c-a078-9f73-f9b950f94ae6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.670109] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aa908e-f063-4e2c-b538-40ba6a604483 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.677450] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7f3197-ac50-406e-8f01-fa2b104c180c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.713325] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29464ea1-aaa7-4148-81b2-0e2e3141c14a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.723048] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65858f66-1f3e-47e6-9687-7e76bc9044b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.737686] env[68279]: DEBUG nova.compute.provider_tree [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.845708] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 907.845708] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7d671c3f-85e6-46c0-9ce0-2c93523cca53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.856468] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 907.856468] env[68279]: value = "task-2963249" [ 907.856468] env[68279]: _type = "Task" [ 907.856468] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.865267] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963249, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.910944] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fe8d62-f369-5240-1891-c9cad60a2ffb, 'name': SearchDatastore_Task, 'duration_secs': 0.013127} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.911222] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.911502] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.911765] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80eaf090-fd9c-4232-be85-3f1e67ead492 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.919475] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 907.919475] env[68279]: value = "task-2963250" [ 907.919475] env[68279]: _type = "Task" [ 907.919475] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.928699] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.070649] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 6b778e98-12c2-42a5-a772-06ea32d090b8] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.103923] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.117278] env[68279]: DEBUG oslo_vmware.api [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963248, 'name': PowerOnVM_Task, 'duration_secs': 0.527728} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.117577] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 908.117788] env[68279]: INFO nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Took 8.37 seconds to spawn the instance on the hypervisor. [ 908.118272] env[68279]: DEBUG nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 908.118821] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c923d8ec-1627-44a4-b14f-2132b663bbc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 908.140389] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.140734] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.140734] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.140889] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.141753] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.141753] env[68279]: DEBUG nova.virt.hardware [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.142396] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d210972f-d473-4802-92cb-0375c64be09c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.152782] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b776fc9-2fa8-4ee1-8221-4bf761031004 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.181319] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522a346a-3a6c-a078-9f73-f9b950f94ae6, 'name': SearchDatastore_Task, 'duration_secs': 0.016243} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.186805] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 908.187423] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d56028a9-1921-4e01-b7f1-c2c9e9d45253 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.205988] env[68279]: DEBUG nova.network.neutron [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updated VIF entry in instance network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.206460] env[68279]: DEBUG nova.network.neutron [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.209398] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 908.209398] env[68279]: value = "task-2963251" [ 908.209398] env[68279]: _type = "Task" [ 908.209398] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.222519] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963251, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.241930] env[68279]: DEBUG nova.scheduler.client.report [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.371046] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963249, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.429790] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963250, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.505215] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Successfully updated port: ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.575032] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e65722bb-e39a-47e5-9aaf-87cfd27930d1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.637802] env[68279]: INFO nova.compute.manager [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Took 69.60 seconds to build instance. [ 908.710629] env[68279]: DEBUG oslo_concurrency.lockutils [req-2882cfa7-86e6-4254-a8a0-eafecf5b11b1 req-953f5cbf-727a-4708-8333-c83efb54c053 service nova] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.720827] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963251, 'name': ReconfigVM_Task, 'duration_secs': 0.23967} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.721214] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 908.722126] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d410311e-a365-40fd-92fc-d5baa9bef009 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.745179] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.745474] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca47a8ee-b806-487c-9a60-a02cc9f3bc6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.759185] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.759723] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 908.762508] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.754s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.763953] env[68279]: INFO nova.compute.claims [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.773071] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 908.773071] env[68279]: value = "task-2963252" [ 908.773071] env[68279]: _type = "Task" [ 908.773071] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.784725] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.870810] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963249, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.932755] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687548} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.932962] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.933232] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.933933] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b53e3f41-1c64-452b-ba63-5090296cd3a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.943293] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 908.943293] env[68279]: value = "task-2963253" [ 908.943293] env[68279]: _type = "Task" [ 908.943293] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.956743] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963253, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.009120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.009120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.009120] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.078388] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 3d3b5611-714f-4757-b848-891319c2fea3] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.139489] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55d88777-d311-47d4-a9ea-a08716c6b26d tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.719s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.270927] env[68279]: DEBUG nova.compute.utils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.272490] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.272660] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.287627] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963252, 'name': ReconfigVM_Task, 'duration_secs': 0.30713} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.287900] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1/665d932d-1068-4bb2-835c-2184a80753d1.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.288754] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd35d39-f4bd-4ec8-92d5-747b40abf67b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.312702] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095a5ab9-f203-483a-8bbb-7a8e08042b31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.317778] env[68279]: DEBUG nova.compute.manager [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Received event network-vif-plugged-ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.317997] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Acquiring lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.318226] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.318390] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.318556] env[68279]: DEBUG nova.compute.manager [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] No waiting events found dispatching network-vif-plugged-ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 909.318716] env[68279]: WARNING nova.compute.manager [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Received unexpected event network-vif-plugged-ad0276f3-cb04-4653-b770-08562e96ba17 for instance with vm_state building and task_state spawning. 
[ 909.318871] env[68279]: DEBUG nova.compute.manager [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Received event network-changed-ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.319061] env[68279]: DEBUG nova.compute.manager [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Refreshing instance network info cache due to event network-changed-ad0276f3-cb04-4653-b770-08562e96ba17. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 909.319230] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Acquiring lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.338020] env[68279]: DEBUG nova.policy [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd67d0e35641a4494a5087e0f3abdc767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd1384256d224e80bf6f25b9fd054376', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.340119] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01c27a6-01eb-4a47-b8ab-ddf8564194ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.359842] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "5cbe4915-5b01-4424-96c8-f3225e512c89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.360125] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.360337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.360512] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 
tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.360679] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.365953] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff635036-4de0-423c-816d-46ff60faee4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.369079] env[68279]: INFO nova.compute.manager [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Terminating instance [ 909.379072] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.379357] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963249, 'name': CloneVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.379781] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55effd6b-cd2e-4bf1-89e6-102d4d27b03a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.390198] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 909.390198] env[68279]: value = "task-2963254" [ 909.390198] env[68279]: _type = "Task" [ 909.390198] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.399129] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963254, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.454544] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081517} completed successfully. 
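Each "Waiting for the task: (returnval){ ... }" block followed by "progress is N%" lines above is oslo.vmware polling an asynchronous vSphere task. A rough sketch of what that amounts to for the PowerOnVM_Task seen here, assuming a placeholder authenticated VMwareAPISession called "session" and a VirtualMachine moref "vm_ref":

    # Sketch only: 'session' is an oslo_vmware.api.VMwareAPISession and
    # 'vm_ref' a VirtualMachine managed-object reference.
    def power_on_and_wait(session, vm_ref):
        # Kick off the asynchronous vSphere task (PowerOnVM_Task in the log).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task and raises on error or cancellation;
        # the periodic "progress is N%" debug lines come from this polling.
        return session.wait_for_task(task)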
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.454887] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.455739] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d904dce3-7ac2-4bb5-8976-fd5866cc1050 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.484550] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.484943] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a70f2f21-8b3f-410c-b8df-93350d41a302 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.507866] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 909.507866] env[68279]: value = "task-2963255" [ 909.507866] env[68279]: _type = "Task" [ 909.507866] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.520827] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963255, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.572881] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.582143] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 8782d86d-0e94-44b4-9595-b0eb2b2a3fb7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.696781] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Successfully created port: 81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.713298] env[68279]: DEBUG nova.network.neutron [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Updating instance_info_cache with network_info: [{"id": "ad0276f3-cb04-4653-b770-08562e96ba17", "address": "fa:16:3e:e6:24:09", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0276f3-cb", "ovs_interfaceid": "ad0276f3-cb04-4653-b770-08562e96ba17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.778782] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 909.873322] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963249, 'name': CloneVM_Task, 'duration_secs': 1.604406} completed successfully. 
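The update_instance_cache_with_nw_info record above carries the whole VIF model as one JSON-like list. A small self-contained sketch of reducing such a blob to the fields the VMware driver later logs as "Instance VIF info" (the dict literal is trimmed from the log; the helper itself is illustrative):

    # Trimmed copy of the VIF entry from the instance_info_cache update above.
    network_info = [{
        "id": "ad0276f3-cb04-4653-b770-08562e96ba17",
        "address": "fa:16:3e:e6:24:09",
        "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba"},
        "vnic_type": "normal",
    }]

    def vif_summary(nw_info):
        """Keep only the fields used when building the instance's VIF info."""
        for vif in nw_info:
            yield {
                'iface_id': vif['id'],
                'mac_address': vif['address'],
                'network_ref': vif['details'].get('nsx-logical-switch-id'),
            }

    print(list(vif_summary(network_info)))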
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.873689] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Created linked-clone VM from snapshot [ 909.874349] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151483fa-fe65-4d0a-a97e-9b3b5c5be679 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.883055] env[68279]: DEBUG nova.compute.manager [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.883289] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.883587] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Uploading image d61da0b1-4731-4fb3-944e-4d833549a243 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 909.888629] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ec3b3c-c13d-4ccd-8eb2-2cf96528800a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.904605] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.904881] env[68279]: DEBUG oslo_vmware.api [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963254, 'name': PowerOnVM_Task, 'duration_secs': 0.412717} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.907641] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73250eff-0c11-403e-b952-11f2a7704c17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.909632] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.916202] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 909.916202] env[68279]: value = "vm-594644" [ 909.916202] env[68279]: _type = "VirtualMachine" [ 909.916202] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 909.916202] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ee625258-b4d0-48a3-8f73-8cc820373db3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.925392] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 909.925392] env[68279]: value = "task-2963256" [ 909.925392] env[68279]: _type = "Task" [ 909.925392] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.927205] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease: (returnval){ [ 909.927205] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3cdd4-78d7-2dd0-1c59-c9f491a27089" [ 909.927205] env[68279]: _type = "HttpNfcLease" [ 909.927205] env[68279]: } obtained for exporting VM: (result){ [ 909.927205] env[68279]: value = "vm-594644" [ 909.927205] env[68279]: _type = "VirtualMachine" [ 909.927205] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 909.928996] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the lease: (returnval){ [ 909.928996] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3cdd4-78d7-2dd0-1c59-c9f491a27089" [ 909.928996] env[68279]: _type = "HttpNfcLease" [ 909.928996] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 909.945584] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963256, 'name': PowerOffVM_Task} progress is 0%. 
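The ExportVm / HttpNfcLease records above start the stream-optimized image upload for the linked-clone snapshot: request a lease, poll it until ready, then read the VMDK URL out of the lease info. A compressed sketch of that handshake under the same placeholder assumptions as before ("session", "vm_ref"); wait_for_lease_ready is the oslo.vmware helper named in the log, while reading the lease 'info' property via vim_util is an assumption about the usual access pattern:

    from oslo_vmware import vim_util

    def open_export_lease(session, vm_ref):
        # ExportVm returns an HttpNfcLease that starts out "initializing".
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        # Blocks until the lease reports "ready" (the _poll_lease lines above).
        session.wait_for_lease_ready(lease)
        # The lease 'info' property carries the datastore-side VMDK URLs,
        # e.g. https://<esx-host>/nfc/.../disk-0.vmdk in the log.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')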
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.947710] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 909.947710] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3cdd4-78d7-2dd0-1c59-c9f491a27089" [ 909.947710] env[68279]: _type = "HttpNfcLease" [ 909.947710] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 910.023404] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.086364] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.086531] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 910.215522] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.215894] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Instance network_info: |[{"id": "ad0276f3-cb04-4653-b770-08562e96ba17", "address": "fa:16:3e:e6:24:09", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0276f3-cb", "ovs_interfaceid": "ad0276f3-cb04-4653-b770-08562e96ba17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 910.216304] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Acquired lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.216529] env[68279]: DEBUG nova.network.neutron [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Refreshing network info cache for port ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.217745] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:24:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad0276f3-cb04-4653-b770-08562e96ba17', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.225977] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating folder: Project (37b1b1fd2ea44d83b954e5b90ae9e3aa). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 910.232174] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c00855a-fde7-4b61-9ca9-55a0590c400f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.249581] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created folder: Project (37b1b1fd2ea44d83b954e5b90ae9e3aa) in parent group-v594445. [ 910.249794] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating folder: Instances. Parent ref: group-v594645. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 910.250144] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bf85e38-3f68-4fee-9dda-6aa8e2d139e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.262866] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created folder: Instances in parent group-v594645. [ 910.263155] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.265900] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.266357] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c79527e0-3485-4d47-a8f1-756ab6ddd947 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.286587] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56007682-5f42-4a40-be73-c629e75063cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.299663] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaef5d5-37d6-40e7-b792-12f825844dcd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.303094] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.303094] env[68279]: value = "task-2963260" [ 910.303094] env[68279]: _type = "Task" [ 910.303094] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.332988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acff34b-73d3-4c3d-a9be-4bfda41fcebc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.338933] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963260, 'name': CreateVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.344982] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15277dfd-b595-41aa-8493-c72c1f88c791 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.362022] env[68279]: DEBUG nova.compute.provider_tree [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.439353] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963256, 'name': PowerOffVM_Task, 'duration_secs': 0.332113} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.446022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.446022] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.446022] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a528f92-23c2-4136-8716-9bc0fb15a8db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.452667] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 910.452667] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3cdd4-78d7-2dd0-1c59-c9f491a27089" [ 910.452667] env[68279]: _type = "HttpNfcLease" [ 910.452667] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 910.453055] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 910.453055] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3cdd4-78d7-2dd0-1c59-c9f491a27089" [ 910.453055] env[68279]: _type = "HttpNfcLease" [ 910.453055] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 910.453955] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e24a6d2-9423-4310-be97-c77f5a6de26d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.462531] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 910.462721] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 910.535381] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963255, 'name': ReconfigVM_Task, 'duration_secs': 0.900203} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.535651] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.536314] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-687edb5f-5c85-4619-80fc-29ba92450899 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.545430] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 910.545430] env[68279]: value = "task-2963262" [ 910.545430] env[68279]: _type = "Task" [ 910.545430] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.552386] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.552620] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.552844] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Deleting the datastore file [datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.553142] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74949d72-2f49-463a-8501-a261d64e957f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.559120] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963262, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.565244] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for the task: (returnval){ [ 910.565244] env[68279]: value = "task-2963263" [ 910.565244] env[68279]: _type = "Task" [ 910.565244] env[68279]: } to complete. 
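The records above trace the driver's teardown of instance 5cbe4915-5b01-4424-96c8-f3225e512c89: power off, unregister the VM, then delete its directory from the datastore, each step surfacing as its own vSphere call. A condensed sketch of that ordering with placeholder arguments and no error handling; the method names match the tasks in the log, the wiring around them is assumed:

    # 'session' is a placeholder VMwareAPISession; 'vm_ref' a VirtualMachine
    # moref; 'ds_path' a datastore path such as
    # "[datastore1] 5cbe4915-5b01-4424-96c8-f3225e512c89".
    def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
        # 1. PowerOffVM_Task (skipped by the real driver if already off).
        session.wait_for_task(
            session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
        # 2. UnregisterVM only detaches the VM from vCenter; files remain.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # 3. DeleteDatastoreFile_Task removes the instance folder contents.
        file_manager = session.vim.service_content.fileManager
        session.wait_for_task(
            session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                               file_manager, name=ds_path,
                               datacenter=datacenter_ref))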
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.578558] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b511b626-77e4-444e-b282-3d398d7ad7c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.582636] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963263, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.591575] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.654931] env[68279]: DEBUG nova.network.neutron [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Updated VIF entry in instance network info cache for port ad0276f3-cb04-4653-b770-08562e96ba17. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.654931] env[68279]: DEBUG nova.network.neutron [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Updating instance_info_cache with network_info: [{"id": "ad0276f3-cb04-4653-b770-08562e96ba17", "address": "fa:16:3e:e6:24:09", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0276f3-cb", "ovs_interfaceid": "ad0276f3-cb04-4653-b770-08562e96ba17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.794967] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 910.822162] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963260, 'name': CreateVM_Task, 'duration_secs': 0.506033} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.824773] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.825051] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.825279] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.825535] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.825728] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.825932] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.826210] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.826463] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.826729] env[68279]: 
DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.826961] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.827218] env[68279]: DEBUG nova.virt.hardware [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.828456] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.830611] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355c77f0-5201-4a0c-ba8f-3df1490c9ee6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.834230] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.834500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.835132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 910.835527] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df84ce1-90d8-4422-a842-796ed8dbf325 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.842920] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 910.842920] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b962f0-530f-1eae-dad9-cb95fab3c7bf" [ 910.842920] env[68279]: _type = "Task" [ 910.842920] env[68279]: } to complete. 
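The nova.virt.hardware block above walks the generic topology search for the m1.nano flavor: with no flavor or image limits, a 1-vCPU guest collapses to the single topology of 1 socket, 1 core, 1 thread. A toy enumeration that reproduces that result, purely to make the arithmetic concrete (not Nova's actual _get_possible_cpu_topologies):

    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus):
        """All socket/core/thread splits whose product equals vcpus."""
        dims = range(1, vcpus + 1)
        return [VirtCPUTopology(s, c, t)
                for s, c, t in itertools.product(dims, dims, dims)
                if s * c * t == vcpus]

    # For 1 vCPU this yields exactly one topology, matching the
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
    print(possible_topologies(1))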
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.849980] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759a8d13-3d25-4241-8b88-fd8a5f140b1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.861984] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b962f0-530f-1eae-dad9-cb95fab3c7bf, 'name': SearchDatastore_Task, 'duration_secs': 0.013323} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.871250] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.871602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.871834] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.872015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.872237] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.873164] env[68279]: DEBUG nova.scheduler.client.report [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
910.877324] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16289db0-8f9f-4402-9975-0eeccea00d15 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.887239] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.887473] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.888228] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2358d159-f2cd-4748-b1c2-ad97c2b4a854 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.894796] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 910.894796] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b8f2-1d33-54f3-d3c2-a4989c5f950e" [ 910.894796] env[68279]: _type = "Task" [ 910.894796] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.904725] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b8f2-1d33-54f3-d3c2-a4989c5f950e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.927715] env[68279]: INFO nova.compute.manager [None req-56238dc1-9c54-44da-9cde-71bda7b93108 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance to original state: 'active' [ 911.059096] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963262, 'name': Rename_Task, 'duration_secs': 0.196598} completed successfully. 
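The inventory payload for provider 40ba16cf-8244-4715-b8c1-975029462ee4 logged just above is what fixes the capacity Placement can allocate from this node: per resource class, the usable amount works out to (total - reserved) * allocation_ratio. A quick check of those numbers under that assumption:

    # Figures copied from the set_inventory_for_provider record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0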
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.059485] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.059889] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-435fc3f3-9846-4da2-86a8-f60c3ba46b3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.070873] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 911.070873] env[68279]: value = "task-2963264" [ 911.070873] env[68279]: _type = "Task" [ 911.070873] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.078096] env[68279]: DEBUG oslo_vmware.api [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Task: {'id': task-2963263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238458} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.078750] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.078982] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.079280] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.079494] env[68279]: INFO nova.compute.manager [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Took 1.20 seconds to destroy the instance on the hypervisor. [ 911.079797] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.080539] env[68279]: DEBUG nova.compute.manager [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.080539] env[68279]: DEBUG nova.network.neutron [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.085600] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.158700] env[68279]: DEBUG oslo_concurrency.lockutils [req-621581e0-d769-42df-8e2a-38558a8f7b2e req-41320606-bbf1-4068-a7e5-61e08c49a16e service nova] Releasing lock "refresh_cache-7e34039c-c51a-4f9c-961c-144f6d8a5130" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.281300] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Successfully updated port: 81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.380448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.382750] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Start building networks asynchronously for instance. 
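The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery driving the retry loop around network deallocation. A minimal example of that utility with a stand-in function (the retried callable and its interval are illustrative, not Nova's):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        """Stand-in for the retried deallocation; succeeds on the third call."""
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # not done yet, keep looping
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    # FixedIntervalLoopingCall re-invokes the function every 'interval' seconds
    # until it raises LoopingCallDone; .wait() returns the final value.
    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    print(timer.start(interval=0.1).wait())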
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.385346] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.743s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.385594] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.390602] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.245s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.390602] env[68279]: INFO nova.compute.claims [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.408890] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5275b8f2-1d33-54f3-d3c2-a4989c5f950e, 'name': SearchDatastore_Task, 'duration_secs': 0.011087} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.409727] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35e48dd3-960f-4422-b5ed-b0da3ef8127e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.418888] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 911.418888] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ac7d5-6aff-c454-10dc-c81821968364" [ 911.418888] env[68279]: _type = "Task" [ 911.418888] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.429485] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ac7d5-6aff-c454-10dc-c81821968364, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.529387] env[68279]: DEBUG nova.compute.manager [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Received event network-vif-plugged-81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.529824] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.529981] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.530223] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.530485] env[68279]: DEBUG nova.compute.manager [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] No waiting events found dispatching network-vif-plugged-81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.530698] env[68279]: WARNING nova.compute.manager [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Received unexpected event network-vif-plugged-81c058ad-3832-478e-b2c5-f65692f52164 for instance with vm_state building and task_state spawning. [ 911.530911] env[68279]: DEBUG nova.compute.manager [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Received event network-changed-81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.531159] env[68279]: DEBUG nova.compute.manager [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Refreshing instance network info cache due to event network-changed-81c058ad-3832-478e-b2c5-f65692f52164. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.531406] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Acquiring lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.531561] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Acquired lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.531723] env[68279]: DEBUG nova.network.neutron [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Refreshing network info cache for port 81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.583264] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.785660] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.882925] env[68279]: DEBUG nova.compute.manager [req-e1ddc85a-b0f4-487f-8a98-5d76c869d77c req-313f316d-00e8-411f-a2c5-eaeb2bd12053 service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Received event network-vif-deleted-8afeda32-cf4a-4c25-8095-c1df322b3c5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.883416] env[68279]: INFO nova.compute.manager [req-e1ddc85a-b0f4-487f-8a98-5d76c869d77c req-313f316d-00e8-411f-a2c5-eaeb2bd12053 service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Neutron deleted interface 8afeda32-cf4a-4c25-8095-c1df322b3c5a; detaching it from the instance and deleting it from the info cache [ 911.883471] env[68279]: DEBUG nova.network.neutron [req-e1ddc85a-b0f4-487f-8a98-5d76c869d77c req-313f316d-00e8-411f-a2c5-eaeb2bd12053 service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.897998] env[68279]: DEBUG nova.compute.utils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.903212] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.903212] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.907729] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74ec00d3-e1ba-44ed-ba08-d751da9420b2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 62.597s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.909190] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 41.522s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.909406] env[68279]: INFO nova.compute.manager [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Unshelving [ 911.931857] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ac7d5-6aff-c454-10dc-c81821968364, 'name': SearchDatastore_Task, 'duration_secs': 0.012554} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.932535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.933157] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7e34039c-c51a-4f9c-961c-144f6d8a5130/7e34039c-c51a-4f9c-961c-144f6d8a5130.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.933849] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fa385f2-17e7-4a71-ae3f-98d0d2a247a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.945439] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 911.945439] env[68279]: value = "task-2963265" [ 911.945439] env[68279]: _type = "Task" [ 911.945439] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.961858] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.020650] env[68279]: DEBUG nova.policy [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c483580241842d98269131d55f317e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9d27076ab7348bb9ca331f4ff68e46f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.087181] env[68279]: DEBUG oslo_vmware.api [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963264, 'name': PowerOnVM_Task, 'duration_secs': 0.930437} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.087634] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.087962] env[68279]: INFO nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Took 9.48 seconds to spawn the instance on the hypervisor. [ 912.088232] env[68279]: DEBUG nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.089391] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1acebc9-e787-43b8-91dd-8b04df8b045f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.094095] env[68279]: DEBUG nova.network.neutron [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.096848] env[68279]: DEBUG nova.network.neutron [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.197384] env[68279]: DEBUG nova.network.neutron [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.388906] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cd2c180-9052-497f-ba83-c1b8a52b48b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.405472] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e8261f-b52e-49cb-bae2-3633887772cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.435301] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.464561] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963265, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.488765] env[68279]: DEBUG nova.compute.manager [req-e1ddc85a-b0f4-487f-8a98-5d76c869d77c req-313f316d-00e8-411f-a2c5-eaeb2bd12053 service nova] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Detach interface failed, port_id=8afeda32-cf4a-4c25-8095-c1df322b3c5a, reason: Instance 5cbe4915-5b01-4424-96c8-f3225e512c89 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 912.582821] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Successfully created port: 42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.600683] env[68279]: INFO nova.compute.manager [-] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Took 1.52 seconds to deallocate network for instance. [ 912.620736] env[68279]: INFO nova.compute.manager [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Took 64.17 seconds to build instance. [ 912.704662] env[68279]: DEBUG oslo_concurrency.lockutils [req-43285a3b-8ef4-47da-bde7-b964f55286c3 req-afafda05-a2b0-4080-954a-5e79cafa1490 service nova] Releasing lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.706155] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.706155] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.959366] env[68279]: DEBUG nova.compute.utils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.965276] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628539} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.966650] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7e34039c-c51a-4f9c-961c-144f6d8a5130/7e34039c-c51a-4f9c-961c-144f6d8a5130.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.966876] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.967243] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfe794c8-0871-48a6-b422-cdd1d8df8e10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.982076] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 912.982076] env[68279]: value = "task-2963266" [ 912.982076] env[68279]: _type = "Task" [ 912.982076] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.992802] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963266, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.017356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "665d932d-1068-4bb2-835c-2184a80753d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.017640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.017850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "665d932d-1068-4bb2-835c-2184a80753d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.018039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.018211] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.021267] env[68279]: INFO nova.compute.manager [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Terminating instance [ 913.068691] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46febc27-cff1-4512-9ec4-6fc719965f8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.080478] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c952b28-866c-4309-bb02-b42933450525 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.111246] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e13d7d6-8ad2-43a9-bc15-5f778abdc262 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.114737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 
tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.121560] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c57e03-6900-4045-b941-2c59ad7bc991 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.125824] env[68279]: DEBUG oslo_concurrency.lockutils [None req-40d4df3d-8721-479a-b8b1-8079651a4305 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.527s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.139016] env[68279]: DEBUG nova.compute.provider_tree [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.252744] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.395513] env[68279]: DEBUG nova.network.neutron [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Updating instance_info_cache with network_info: [{"id": "81c058ad-3832-478e-b2c5-f65692f52164", "address": "fa:16:3e:72:a9:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81c058ad-38", "ovs_interfaceid": "81c058ad-3832-478e-b2c5-f65692f52164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.455417] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 
tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.469884] env[68279]: INFO nova.virt.block_device [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Booting with volume 064e058e-cfe0-4945-a75c-2c0d2b58a092 at /dev/sdb [ 913.484147] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.484951] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.484951] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.484951] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.485193] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.485277] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.487098] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.487098] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.487098] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.487098] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.487098] env[68279]: DEBUG nova.virt.hardware [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.491140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f3ccd0-b58e-406b-bc00-f3d5725aa5db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.503249] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695cff73-ee98-4b86-b4fa-60f7a2846e35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.508435] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210552} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.508435] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.509989] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d150ece0-0b21-4cc6-929d-3e0d06538006 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.522450] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab29d562-46dd-4450-8892-d238877995d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.525142] env[68279]: DEBUG nova.compute.manager [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 913.525364] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.526522] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b96644-859a-45e1-b4e4-34ed56a43bf3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.547725] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 7e34039c-c51a-4f9c-961c-144f6d8a5130/7e34039c-c51a-4f9c-961c-144f6d8a5130.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.550127] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d8909fb-5572-4e44-9b55-9d59fec65b2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.569071] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3209bc49-6b8c-4d59-aaf2-2e655f9bfcf6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.579626] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.580560] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-946262dd-eef6-478c-9c62-c2843460b4e5 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.589366] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 913.589366] env[68279]: value = "task-2963267" [ 913.589366] env[68279]: _type = "Task" [ 913.589366] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.590689] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 913.590689] env[68279]: value = "task-2963268" [ 913.590689] env[68279]: _type = "Task" [ 913.590689] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.612690] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-904638e1-97e5-4723-a536-9ee5e19a6959 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.617998] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.621511] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963268, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.633026] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2783a3cd-a76d-4b15-a18c-9aa21ad45317 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.646142] env[68279]: DEBUG nova.scheduler.client.report [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.679128] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20f3d52-308b-4743-b62e-dafb0e9f788f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.688985] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4b467c-cc2d-4b3f-a033-362ea3941ad5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.706581] env[68279]: DEBUG nova.virt.block_device [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating existing volume attachment record: c65c02ca-79f4-4e4f-adf9-6567b40ba858 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 913.899465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-02f34ac7-9deb-4714-92cb-bb507fde1e74" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.900042] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance network_info: |[{"id": "81c058ad-3832-478e-b2c5-f65692f52164", "address": "fa:16:3e:72:a9:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81c058ad-38", "ovs_interfaceid": "81c058ad-3832-478e-b2c5-f65692f52164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.900731] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:a9:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81c058ad-3832-478e-b2c5-f65692f52164', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.914263] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.914499] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.914857] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1f03992-3a2e-4dd1-b38f-7a13ad12dc56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.941398] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.941398] env[68279]: value = "task-2963270" [ 913.941398] env[68279]: _type = "Task" [ 913.941398] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.952262] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963270, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.107345] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963267, 'name': ReconfigVM_Task, 'duration_secs': 0.375068} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.110786] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 7e34039c-c51a-4f9c-961c-144f6d8a5130/7e34039c-c51a-4f9c-961c-144f6d8a5130.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.111241] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963268, 'name': PowerOffVM_Task, 'duration_secs': 0.492163} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.111502] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec9c90f4-6d32-48d6-9c0d-077b44e34fa1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.113330] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.113632] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.113928] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c94f75ee-d387-43a1-8def-76c666f51113 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.123876] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 914.123876] env[68279]: value = "task-2963271" [ 914.123876] env[68279]: _type = "Task" [ 914.123876] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.135253] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963271, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.152301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.152879] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 914.155782] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.171s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.157382] env[68279]: INFO nova.compute.claims [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.208519] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.208901] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.209288] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleting the datastore file [datastore2] 665d932d-1068-4bb2-835c-2184a80753d1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.209688] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27bb9c5a-6179-471d-aa03-59f4d4742203 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.222478] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 914.222478] env[68279]: value = "task-2963273" [ 914.222478] env[68279]: _type = "Task" [ 914.222478] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.234505] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.452129] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963270, 'name': CreateVM_Task, 'duration_secs': 0.498151} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.452345] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.453112] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.453287] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.453698] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.453991] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea3509f8-2f76-4650-9fb4-cbc96a689f61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.460227] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 914.460227] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222f30d-0595-6c8a-0feb-ef554485428e" [ 914.460227] env[68279]: _type = "Task" [ 914.460227] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.474309] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222f30d-0595-6c8a-0feb-ef554485428e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.643113] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963271, 'name': Rename_Task, 'duration_secs': 0.208834} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.645018] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.645018] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85031e33-6fbe-4e84-b798-4856fda46979 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.654921] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 914.654921] env[68279]: value = "task-2963276" [ 914.654921] env[68279]: _type = "Task" [ 914.654921] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.661549] env[68279]: DEBUG nova.compute.utils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.669150] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 914.669361] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.671452] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963276, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.722165] env[68279]: DEBUG nova.policy [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6dcff6c11546f9b0907917a2463755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbad607de614a809c51668c2ac0d012', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 914.735402] env[68279]: DEBUG oslo_vmware.api [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191288} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.735688] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.735876] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.736064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.736237] env[68279]: INFO nova.compute.manager [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Took 1.21 seconds to destroy the instance on the hypervisor. [ 914.736532] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.736750] env[68279]: DEBUG nova.compute.manager [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.736842] env[68279]: DEBUG nova.network.neutron [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.808499] env[68279]: DEBUG nova.compute.manager [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Received event network-vif-plugged-42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.808827] env[68279]: DEBUG oslo_concurrency.lockutils [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] Acquiring lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.809396] env[68279]: DEBUG oslo_concurrency.lockutils [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] Lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.809725] env[68279]: DEBUG oslo_concurrency.lockutils [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] Lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.810104] env[68279]: DEBUG nova.compute.manager [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] No waiting events found dispatching network-vif-plugged-42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.810334] env[68279]: WARNING nova.compute.manager [req-78a23359-8e47-4e8f-bb09-a45d39491e30 req-ad884762-df43-487c-bba8-99674f97756c service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Received unexpected event network-vif-plugged-42723128-3fe2-4814-835a-5ee1b4b1d92e for instance with vm_state building and task_state spawning. 
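
The entries above show nova-compute receiving a network-vif-plugged event from Neutron before any waiter for it was registered, which is why the "Received unexpected event ... for instance with vm_state building and task_state spawning" warning appears. Below is a minimal, hypothetical sketch of that pop-or-warn dispatch pattern using only the Python standard library; the class and function names are illustrative and are not the actual nova.compute.manager.InstanceEvents implementation.

# Hypothetical sketch of the "pop a waiting event or warn" pattern seen above.
# Not the real Nova code; names and structure are illustrative only.
import threading
from collections import defaultdict


class InstanceEventWaiters:
    """Tracks per-instance events that a build thread is waiting on."""

    def __init__(self):
        self._lock = threading.Lock()
        # {instance_uuid: {event_name: threading.Event()}}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        """Return the registered waiter, or None if nobody is waiting."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(waiters, instance_uuid, event_name):
    """Mimics the external-event handler: signal the waiter or warn."""
    waiter = waiters.pop(instance_uuid, event_name)
    if waiter is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()


if __name__ == "__main__":
    waiters = InstanceEventWaiters()
    # Event arrives before anyone registered for it -> the "unexpected" branch.
    handle_external_event(waiters, "19f693cd", "network-vif-plugged-42723128")
    # Normal flow: register first, then the event arrives and unblocks the waiter.
    w = waiters.prepare("19f693cd", "network-vif-plugged-42723128")
    handle_external_event(waiters, "19f693cd", "network-vif-plugged-42723128")
    print("waiter signalled:", w.is_set())
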
[ 914.908605] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Successfully updated port: 42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.972627] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222f30d-0595-6c8a-0feb-ef554485428e, 'name': SearchDatastore_Task, 'duration_secs': 0.01213} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.972955] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.973222] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.973470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.973619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.973801] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.974106] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e851dc83-9490-4847-a9dd-df9e246086b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.984731] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.984945] env[68279]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.986041] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3ac4d8f-d2c1-465d-8405-be1d4c876687 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.993766] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 914.993766] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52533ae3-c3b0-0992-d955-2533b7c81b0b" [ 914.993766] env[68279]: _type = "Task" [ 914.993766] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.005142] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52533ae3-c3b0-0992-d955-2533b7c81b0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.166463] env[68279]: DEBUG oslo_vmware.api [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963276, 'name': PowerOnVM_Task, 'duration_secs': 0.498318} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.167090] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.167090] env[68279]: INFO nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Took 7.06 seconds to spawn the instance on the hypervisor. [ 915.167221] env[68279]: DEBUG nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.168280] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0967010c-f10e-41ae-b098-74a10668513f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.172733] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 915.218740] env[68279]: DEBUG nova.compute.manager [req-5102bff0-c860-4ec1-ade6-53c5553dc09f req-3fbb17d5-3047-42db-85fe-94080a4f3752 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Received event network-vif-deleted-14d0066b-e387-4f2f-a12a-c40206f0b1d0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 915.218908] env[68279]: INFO nova.compute.manager [req-5102bff0-c860-4ec1-ade6-53c5553dc09f req-3fbb17d5-3047-42db-85fe-94080a4f3752 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Neutron deleted interface 14d0066b-e387-4f2f-a12a-c40206f0b1d0; detaching it from the instance and deleting it from the info cache [ 915.219294] env[68279]: DEBUG nova.network.neutron [req-5102bff0-c860-4ec1-ade6-53c5553dc09f req-3fbb17d5-3047-42db-85fe-94080a4f3752 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.254690] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Successfully created port: d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 915.411634] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.411771] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.411923] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.505492] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52533ae3-c3b0-0992-d955-2533b7c81b0b, 'name': SearchDatastore_Task, 'duration_secs': 0.012498} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.508352] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80667ee5-06be-44d9-b7c9-428e71d9f442 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.515591] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 915.515591] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9554b-fb9b-1367-250e-fc336bf18b0a" [ 915.515591] env[68279]: _type = "Task" [ 915.515591] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.526388] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9554b-fb9b-1367-250e-fc336bf18b0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.653281] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037cd48c-e619-4dbd-a0ec-b1d6b13d897d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.661882] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8504b3f-e81b-4f22-b888-25ecdefbee95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.695314] env[68279]: DEBUG nova.network.neutron [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.701696] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9f2fbb-3122-486d-b919-8b8b7927c437 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.706738] env[68279]: INFO nova.compute.manager [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Took 59.32 seconds to build instance. 
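
Entries such as "Waiting for the task ... to complete", the repeated "progress is 0%" polls, and the final "completed successfully ... duration_secs" all come from the wait_for_task/_poll_task loop in oslo.vmware. The following is a minimal sketch of that poll-until-terminal pattern under stated assumptions: fetch_task_state is a hypothetical callable standing in for a vSphere task-info lookup, and the interval and timeout values are arbitrary, not the driver's configured ones.

# Minimal sketch of a poll-until-terminal task loop, similar in spirit to the
# wait_for_task/_poll_task entries above. fetch_task_state is a hypothetical
# stand-in for a vSphere task-info query; this is not the oslo.vmware API.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(fetch_task_state, interval=0.5, timeout=60.0):
    """Poll fetch_task_state() until it reports success or error.

    fetch_task_state() must return a (state, progress) tuple where state is
    one of 'queued', 'running', 'success', 'error'.
    """
    deadline = time.monotonic() + timeout
    started = time.monotonic()
    while True:
        state, progress = fetch_task_state()
        print(f"task progress is {progress}% (state={state})")
        if state == "success":
            return time.monotonic() - started   # duration_secs, as in the log
        if state == "error":
            raise TaskFailed("task ended in error state")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake task that finishes after three polls.
    polls = iter([("running", 0), ("running", 25), ("success", 100)])
    duration = wait_for_task(lambda: next(polls), interval=0.01)
    print(f"completed successfully in {duration:.3f}s")
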
[ 915.713536] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3af1522-e69c-4189-9dc9-1390cc433027 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.723428] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7230cc23-3630-421d-9193-49e44dd519bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.733295] env[68279]: DEBUG nova.compute.manager [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 915.736622] env[68279]: DEBUG nova.compute.provider_tree [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.746524] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39594e3c-85cb-475b-b4a6-93e9010e91e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.785476] env[68279]: DEBUG nova.compute.manager [req-5102bff0-c860-4ec1-ade6-53c5553dc09f req-3fbb17d5-3047-42db-85fe-94080a4f3752 service nova] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Detach interface failed, port_id=14d0066b-e387-4f2f-a12a-c40206f0b1d0, reason: Instance 665d932d-1068-4bb2-835c-2184a80753d1 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 915.954813] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.027389] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9554b-fb9b-1367-250e-fc336bf18b0a, 'name': SearchDatastore_Task, 'duration_secs': 0.019813} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.030320] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.030808] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.030918] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e06069bb-449b-4f10-8a93-726cfd921608 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.041240] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 916.041240] env[68279]: value = "task-2963277" [ 916.041240] env[68279]: _type = "Task" [ 916.041240] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.050342] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963277, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.106559] env[68279]: DEBUG nova.network.neutron [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Updating instance_info_cache with network_info: [{"id": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "address": "fa:16:3e:35:7e:e5", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42723128-3f", "ovs_interfaceid": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.200558] env[68279]: INFO nova.compute.manager [-] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Took 1.46 seconds to deallocate network for instance. [ 916.201804] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 916.209372] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f63bb0b7-ec31-4d00-ae30-e4ade31f46e8 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.503s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.232919] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='964ec53429caca60196623116ef5fe0f',container_format='bare',created_at=2025-03-12T08:47:50Z,direct_url=,disk_format='vmdk',id=cfebc35c-c617-4fc2-9a37-bdc659062679,min_disk=1,min_ram=0,name='tempest-test-snap-2099027503',owner='9dbad607de614a809c51668c2ac0d012',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-12T08:48:03Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.233184] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.233340] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.233519] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.233672] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.233816] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.234160] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.235029] env[68279]: DEBUG nova.virt.hardware 
[None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.235029] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.235029] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.235029] env[68279]: DEBUG nova.virt.hardware [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.235943] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dd4f0e-8161-47db-8de7-da1ed821a378 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.242825] env[68279]: DEBUG nova.scheduler.client.report [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.254018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e23301-b7a5-426f-9536-0c03b4e5c3bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.261040] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.557021] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963277, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.610243] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.612642] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Instance network_info: |[{"id": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "address": "fa:16:3e:35:7e:e5", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42723128-3f", "ovs_interfaceid": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.613878] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:7e:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42723128-3fe2-4814-835a-5ee1b4b1d92e', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.623088] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.623479] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.623829] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aaa0fd6-3fd1-478c-928a-17530c2ee10b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.649444] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.649444] env[68279]: value = "task-2963279" [ 916.649444] env[68279]: _type = "Task" [ 916.649444] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.660566] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963279, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.711220] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.748456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.751224] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.751872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.644s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.752079] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.754448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.115s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.754666] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.756480] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.436s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.756675] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.758794] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.204s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.758980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.760697] 
env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.074s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.760978] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.762590] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.639s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.762858] env[68279]: DEBUG nova.objects.instance [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lazy-loading 'resources' on Instance uuid b2e272b3-520a-4ef7-8141-a9d55739d6b9 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.787988] env[68279]: INFO nova.scheduler.client.report [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Deleted allocations for instance 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e [ 916.797851] env[68279]: INFO nova.scheduler.client.report [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Deleted allocations for instance b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e [ 916.807500] env[68279]: INFO nova.scheduler.client.report [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleted allocations for instance 777eda1c-ca3f-4db0-b6b9-5901de5781ff [ 916.819280] env[68279]: INFO nova.scheduler.client.report [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Deleted allocations for instance 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03 [ 916.826415] env[68279]: INFO nova.scheduler.client.report [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Deleted allocations for instance 8aa8c866-4807-4a06-904e-53c149047d65 [ 916.828288] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Successfully updated port: d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 917.053675] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 
tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664831} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.053939] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.054201] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.054482] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f592a8fe-58be-4ddd-bfc8-b29f7da8e39c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.066739] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 917.066739] env[68279]: value = "task-2963280" [ 917.066739] env[68279]: _type = "Task" [ 917.066739] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.076922] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963280, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.122093] env[68279]: DEBUG nova.compute.manager [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Received event network-changed-42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.122319] env[68279]: DEBUG nova.compute.manager [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Refreshing instance network info cache due to event network-changed-42723128-3fe2-4814-835a-5ee1b4b1d92e. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 917.124598] env[68279]: DEBUG oslo_concurrency.lockutils [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] Acquiring lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.124598] env[68279]: DEBUG oslo_concurrency.lockutils [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] Acquired lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.124598] env[68279]: DEBUG nova.network.neutron [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Refreshing network info cache for port 42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.162894] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963279, 'name': CreateVM_Task, 'duration_secs': 0.371475} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.162894] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.162894] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.162894] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.162894] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.162894] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9accc1d3-391a-48ba-8e07-2f2f9f60380b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.168813] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 917.168813] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52162e3e-c6ed-9e45-c6ea-2f126b698285" [ 917.168813] env[68279]: _type = "Task" [ 917.168813] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.178220] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52162e3e-c6ed-9e45-c6ea-2f126b698285, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.266320] env[68279]: DEBUG nova.compute.utils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 917.270287] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 917.270488] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.303185] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e2704c30-3d67-4b34-aca1-c082a11d43d4 tempest-ServersV294TestFqdnHostnames-913658332 tempest-ServersV294TestFqdnHostnames-913658332-project-member] Lock "932663fb-ea20-48d2-b6e8-2d3b32bbdd8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.523s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.305726] env[68279]: DEBUG oslo_concurrency.lockutils [None req-acf1753c-7c1e-4503-b0e9-3f5f0e583486 tempest-ServersTestManualDisk-1681180738 tempest-ServersTestManualDisk-1681180738-project-member] Lock "b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.926s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.318417] env[68279]: DEBUG oslo_concurrency.lockutils [None req-233114c1-a914-476d-bbbb-0211fada564c tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "777eda1c-ca3f-4db0-b6b9-5901de5781ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.168s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.321599] env[68279]: DEBUG nova.policy [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51c793de4bcd4224bb124277db10455a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55456ebdb4454822b357305de4ca7db7', 'project_domain_id': 'default', 'roles': ['member', 
'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.334119] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.334269] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.334416] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.338788] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc81c7f6-bda9-4e85-ab94-49bed57153c4 tempest-ServerRescueTestJSON-667711174 tempest-ServerRescueTestJSON-667711174-project-member] Lock "015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.173s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.343617] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45ea7c24-f0b5-43c4-ad8b-46b5074ca43b tempest-ServerMetadataNegativeTestJSON-53829907 tempest-ServerMetadataNegativeTestJSON-53829907-project-member] Lock "8aa8c866-4807-4a06-904e-53c149047d65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.787s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.372049] env[68279]: DEBUG nova.compute.manager [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Received event network-vif-plugged-d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.373059] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Acquiring lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.373059] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.373259] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 
req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.373422] env[68279]: DEBUG nova.compute.manager [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] No waiting events found dispatching network-vif-plugged-d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 917.373583] env[68279]: WARNING nova.compute.manager [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Received unexpected event network-vif-plugged-d740fd9a-b622-4f9e-b912-7e3d855dc7fa for instance with vm_state building and task_state spawning. [ 917.373737] env[68279]: DEBUG nova.compute.manager [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Received event network-changed-d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.373907] env[68279]: DEBUG nova.compute.manager [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Refreshing instance network info cache due to event network-changed-d740fd9a-b622-4f9e-b912-7e3d855dc7fa. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 917.374088] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Acquiring lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.587022] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080731} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.590657] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.592188] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88d1400-4809-454e-b4be-7414a05b368c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.629659] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.634275] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db185a28-0661-4a7d-ae15-3eab356ae9f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.655636] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 917.655636] env[68279]: value = "task-2963281" [ 917.655636] env[68279]: _type = "Task" [ 917.655636] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.668738] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.681746] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52162e3e-c6ed-9e45-c6ea-2f126b698285, 'name': SearchDatastore_Task, 'duration_secs': 0.018487} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.684984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.685517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.685594] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.685681] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.685871] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.686437] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4762c60-d0c3-4854-95ff-78280ff2cbb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.696576] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.696783] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.700206] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2703c43-de19-4fe1-b871-73d5e907feab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.708266] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 917.708266] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f580a4-2442-213e-3026-c7ee07ae2417" [ 917.708266] env[68279]: _type = "Task" [ 917.708266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.721505] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f580a4-2442-213e-3026-c7ee07ae2417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.772865] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed886d9-1748-43a2-8971-11deb7e1ab71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.778180] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Successfully created port: fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.780899] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.793510] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8100ee71-93f5-42ab-8b6f-7ee90123b03e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.836644] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83b1ed1-91fc-40d3-a917-3323043a3847 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.849369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af880000-2be6-4465-81cf-175085f2bb24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.866752] env[68279]: DEBUG nova.compute.provider_tree [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.894442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "80d881c8-3363-4cf8-bf16-a715d8739335" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.894737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.930611] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.965821] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.966147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.966458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.966713] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.966945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.972672] env[68279]: INFO nova.compute.manager [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Terminating instance [ 918.166734] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963281, 'name': ReconfigVM_Task, 'duration_secs': 0.333123} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.167186] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.170353] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edbc4097-75ab-4895-b442-0d1f0d19d81c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.178871] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 918.178871] env[68279]: value = "task-2963282" [ 918.178871] env[68279]: _type = "Task" [ 918.178871] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.187830] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963282, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.219351] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f580a4-2442-213e-3026-c7ee07ae2417, 'name': SearchDatastore_Task, 'duration_secs': 0.014078} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.220383] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9a0a1d5-81db-49ce-98e1-ba41c28b93e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.226487] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 918.226487] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52194d5a-98a5-acc8-d637-1986972ac8fe" [ 918.226487] env[68279]: _type = "Task" [ 918.226487] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.236159] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52194d5a-98a5-acc8-d637-1986972ac8fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.373291] env[68279]: DEBUG nova.scheduler.client.report [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.397416] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 918.432292] env[68279]: DEBUG nova.network.neutron [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Updating instance_info_cache with network_info: [{"id": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "address": "fa:16:3e:e6:b5:65", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd740fd9a-b6", "ovs_interfaceid": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.458014] env[68279]: DEBUG nova.network.neutron [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Updated VIF entry in instance network info cache for port 42723128-3fe2-4814-835a-5ee1b4b1d92e. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.458014] env[68279]: DEBUG nova.network.neutron [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Updating instance_info_cache with network_info: [{"id": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "address": "fa:16:3e:35:7e:e5", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42723128-3f", "ovs_interfaceid": "42723128-3fe2-4814-835a-5ee1b4b1d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.484025] env[68279]: DEBUG nova.compute.manager [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 918.484025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.484025] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a825def-7b65-45f2-abff-8cf7bd53631d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.493205] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.493205] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d281806-4a94-419b-83d0-25753fc2a2b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.503171] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 918.503171] env[68279]: value = "task-2963283" [ 918.503171] env[68279]: _type = "Task" [ 918.503171] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.515172] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.693075] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963282, 'name': Rename_Task, 'duration_secs': 0.160822} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.693075] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.693075] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-930679c3-6fba-40f1-a28d-f4efb4e68d8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.702022] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 918.702022] env[68279]: value = "task-2963284" [ 918.702022] env[68279]: _type = "Task" [ 918.702022] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.711861] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.738739] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52194d5a-98a5-acc8-d637-1986972ac8fe, 'name': SearchDatastore_Task, 'duration_secs': 0.016586} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.739316] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.739754] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 19f693cd-b598-432d-acf5-64da9f640d5e/19f693cd-b598-432d-acf5-64da9f640d5e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.740195] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f57c811-e6e1-4da9-a5c5-e47218520163 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.750176] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 918.750176] env[68279]: value = "task-2963285" [ 918.750176] env[68279]: _type = "Task" [ 918.750176] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.765802] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.792028] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.820125] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.820370] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.820771] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.821162] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.821487] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.821853] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.822247] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.823411] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 918.823411] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.823784] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.824242] env[68279]: DEBUG nova.virt.hardware [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.826460] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca6ee8d-db3b-473c-b4f7-c63a4ee707c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.836570] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc3c471-1dd2-4ee7-aaaa-83e28a027f25 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.882066] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.117s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.882664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.240s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.885279] env[68279]: INFO nova.compute.claims [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.906729] env[68279]: INFO nova.scheduler.client.report [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Deleted allocations for instance b2e272b3-520a-4ef7-8141-a9d55739d6b9 [ 918.929279] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
[ 918.938037] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.938037] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Instance network_info: |[{"id": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "address": "fa:16:3e:e6:b5:65", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd740fd9a-b6", "ovs_interfaceid": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 918.938037] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Acquired lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.938037] env[68279]: DEBUG nova.network.neutron [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Refreshing network info cache for port d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.938037] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:b5:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd740fd9a-b622-4f9e-b912-7e3d855dc7fa', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.948275] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.949917] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.951137] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-749e00f2-957f-47e8-885b-e1e131d460a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.967455] env[68279]: DEBUG oslo_concurrency.lockutils [req-5919e11f-a247-4544-b045-c9277d9e9ba9 req-366ca7a0-d2d4-464c-8e3b-80cb26f0b953 service nova] Releasing lock "refresh_cache-19f693cd-b598-432d-acf5-64da9f640d5e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.977023] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.977023] env[68279]: value = "task-2963286" [ 918.977023] env[68279]: _type = "Task" [ 918.977023] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.986951] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963286, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.016958] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963283, 'name': PowerOffVM_Task, 'duration_secs': 0.196161} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.017702] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.018073] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.018480] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5b5e258-fbb4-4bab-be54-476ac183283a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.107825] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.109433] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Deleting contents of the VM from 
datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.113022] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleting the datastore file [datastore2] d61b2c4f-942a-4e29-8cac-11bc0750605a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.113022] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de585c09-9314-49c3-884c-00f9c0b6a1b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.123115] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for the task: (returnval){ [ 919.123115] env[68279]: value = "task-2963288" [ 919.123115] env[68279]: _type = "Task" [ 919.123115] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.135099] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.218489] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963284, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.271305] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963285, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.420262] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3eaaea60-6195-4ff3-8373-bdfdce7cb77a tempest-FloatingIPsAssociationTestJSON-795807758 tempest-FloatingIPsAssociationTestJSON-795807758-project-member] Lock "b2e272b3-520a-4ef7-8141-a9d55739d6b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.426s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.486449] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963286, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.638043] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963288, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.715453] env[68279]: DEBUG oslo_vmware.api [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963284, 'name': PowerOnVM_Task, 'duration_secs': 0.650383} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.719197] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.719197] env[68279]: INFO nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Took 8.92 seconds to spawn the instance on the hypervisor. [ 919.719197] env[68279]: DEBUG nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 919.719197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd39ac67-42a5-4983-b2ab-2f4afda4ccf9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.774108] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595975} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.774507] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 19f693cd-b598-432d-acf5-64da9f640d5e/19f693cd-b598-432d-acf5-64da9f640d5e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.774775] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.776853] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-116dc4ea-928f-4549-8261-418c0dc3f8f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.790491] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 919.790491] env[68279]: value = "task-2963289" [ 919.790491] env[68279]: _type = "Task" [ 919.790491] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.801518] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963289, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.850574] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.995041] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963286, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.032143] env[68279]: DEBUG nova.network.neutron [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Updated VIF entry in instance network info cache for port d740fd9a-b622-4f9e-b912-7e3d855dc7fa. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.032503] env[68279]: DEBUG nova.network.neutron [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Updating instance_info_cache with network_info: [{"id": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "address": "fa:16:3e:e6:b5:65", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd740fd9a-b6", "ovs_interfaceid": "d740fd9a-b622-4f9e-b912-7e3d855dc7fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.143257] env[68279]: DEBUG oslo_vmware.api [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Task: {'id': task-2963288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.583846} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.143518] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.143713] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.144158] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.144286] env[68279]: INFO nova.compute.manager [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Took 1.66 seconds to destroy the instance on the hypervisor. 
[ 920.144600] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.144812] env[68279]: DEBUG nova.compute.manager [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 920.144910] env[68279]: DEBUG nova.network.neutron [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.242971] env[68279]: INFO nova.compute.manager [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Took 54.75 seconds to build instance. [ 920.307515] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130748} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.307791] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.309870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3044a7-4675-41f6-a8d1-8b033009db2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.342889] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 19f693cd-b598-432d-acf5-64da9f640d5e/19f693cd-b598-432d-acf5-64da9f640d5e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.346321] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed832032-2283-43ff-8981-79393d9f9a94 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.371356] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 920.371356] env[68279]: value = "task-2963290" [ 920.371356] env[68279]: _type = "Task" [ 920.371356] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.382238] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963290, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.443142] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b565643-4df9-435e-aee4-4ad51fb01a1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.454958] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a26ab19-4aaa-4651-b4ae-19c48cf9f629 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.501742] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4aaedc6-13df-4f73-9221-65727b65f4ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.505414] env[68279]: DEBUG nova.compute.manager [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Received event network-vif-plugged-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.505667] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] Acquiring lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.505885] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.506052] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.506229] env[68279]: DEBUG nova.compute.manager [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] No waiting events found dispatching network-vif-plugged-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 920.506826] env[68279]: WARNING nova.compute.manager [req-a9586082-595e-4b33-b623-f1513fe46078 req-88bd29ac-e2da-4b5d-b621-4f5dc38d492c service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Received unexpected event network-vif-plugged-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 for instance with vm_state building and 
task_state spawning. [ 920.517298] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477b20d6-8247-4a67-8599-3ff461f80dd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.525503] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963286, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.539126] env[68279]: DEBUG oslo_concurrency.lockutils [req-0dae276d-a598-4db4-868c-722ebc9f9281 req-72116dd4-b614-42ee-963e-ea8bf2de86ae service nova] Releasing lock "refresh_cache-a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.540164] env[68279]: DEBUG nova.compute.provider_tree [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.747757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8aabf7d4-3472-4fd3-aa77-21b0a4e76544 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.517s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.883685] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963290, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.914648] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 920.916347] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14c42bf-907f-40fc-ac0e-caeb9386abeb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.924841] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk is in state: ready. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 920.925167] env[68279]: ERROR oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk due to incomplete transfer. [ 920.926256] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2f976436-2713-4e66-a873-5e2614489097 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.939772] env[68279]: DEBUG oslo_vmware.rw_handles [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1e13b-97d8-dfbf-c254-fa4dc2f67e67/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 920.940017] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Uploaded image d61da0b1-4731-4fb3-944e-4d833549a243 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 920.945634] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 920.945634] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-830eca4e-7be2-4416-a9aa-7c381d7482ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.956519] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 920.956519] env[68279]: value = "task-2963291" [ 920.956519] env[68279]: _type = "Task" [ 920.956519] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.968688] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963291, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.009964] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963286, 'name': CreateVM_Task, 'duration_secs': 1.867513} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.012404] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.013245] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.013402] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.013777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 921.014056] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f526c3ae-2edb-46be-b674-66b06eed48b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.019634] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 921.019634] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52294ce1-35ac-7496-59cd-ac573affa801" [ 921.019634] env[68279]: _type = "Task" [ 921.019634] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.029775] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52294ce1-35ac-7496-59cd-ac573affa801, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.043650] env[68279]: DEBUG nova.scheduler.client.report [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.205683] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Successfully updated port: fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.384104] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963290, 'name': ReconfigVM_Task, 'duration_secs': 0.518728} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.384474] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 19f693cd-b598-432d-acf5-64da9f640d5e/19f693cd-b598-432d-acf5-64da9f640d5e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.385156] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc0eba00-9042-48d3-89cf-5d401d6f2331 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.392863] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 921.392863] env[68279]: value = "task-2963292" [ 921.392863] env[68279]: _type = "Task" [ 921.392863] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.402960] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963292, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.473679] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963291, 'name': Destroy_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.540861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.541666] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Processing image cfebc35c-c617-4fc2-9a37-bdc659062679 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.542280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.546526] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.546526] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.546526] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-161fad7a-92d0-43a8-8848-a21986460ceb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.548992] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.550964] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 921.553812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.630s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.554068] env[68279]: DEBUG nova.objects.instance [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'resources' on Instance uuid 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.566240] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.566409] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.567242] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42e0f838-2413-4340-bbcf-90582a535066 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.575257] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 921.575257] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aab986-f0aa-6566-3263-8fc767b2d779" [ 921.575257] env[68279]: _type = "Task" [ 921.575257] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.587871] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aab986-f0aa-6566-3263-8fc767b2d779, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.708499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.708499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquired lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.708499] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.784707] env[68279]: DEBUG nova.network.neutron [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.906033] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963292, 'name': Rename_Task, 'duration_secs': 0.333527} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.906347] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.906599] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad824e9a-5c30-4618-a864-c7b6b0c3c204 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.915785] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 921.915785] env[68279]: value = "task-2963293" [ 921.915785] env[68279]: _type = "Task" [ 921.915785] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.927223] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963293, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.968938] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963291, 'name': Destroy_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.060151] env[68279]: DEBUG nova.compute.utils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.064928] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.065174] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.090731] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 922.091161] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Fetch image to [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8/OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 922.091387] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Downloading stream optimized image cfebc35c-c617-4fc2-9a37-bdc659062679 to [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8/OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8.vmdk on the data store datastore2 as vApp {{(pid=68279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 922.093117] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Downloading image file data cfebc35c-c617-4fc2-9a37-bdc659062679 to the ESX as VM named 'OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8' {{(pid=68279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 922.179051] env[68279]: DEBUG nova.policy [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 
tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8831a56664404da3a03d6d8241e693ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91ef3e24b66c44a29463a982c192a06e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 922.190984] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 922.190984] env[68279]: value = "resgroup-9" [ 922.190984] env[68279]: _type = "ResourcePool" [ 922.190984] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 922.195065] env[68279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f531e9aa-2e11-4609-b329-7bc6019418a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.218470] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease: (returnval){ [ 922.218470] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 922.218470] env[68279]: _type = "HttpNfcLease" [ 922.218470] env[68279]: } obtained for vApp import into resource pool (val){ [ 922.218470] env[68279]: value = "resgroup-9" [ 922.218470] env[68279]: _type = "ResourcePool" [ 922.218470] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 922.218470] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the lease: (returnval){ [ 922.218470] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 922.218470] env[68279]: _type = "HttpNfcLease" [ 922.218470] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 922.230302] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.230302] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 922.230302] env[68279]: _type = "HttpNfcLease" [ 922.230302] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 922.288311] env[68279]: INFO nova.compute.manager [-] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Took 2.14 seconds to deallocate network for instance. [ 922.303289] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.434155] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963293, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.472778] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963291, 'name': Destroy_Task, 'duration_secs': 1.392086} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.473113] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Destroyed the VM [ 922.473413] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 922.473708] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b1f4ef3b-6325-4949-b2bb-d87a60eca31b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.483070] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 922.483070] env[68279]: value = "task-2963295" [ 922.483070] env[68279]: _type = "Task" [ 922.483070] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.498673] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963295, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.567490] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 922.624654] env[68279]: DEBUG nova.network.neutron [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Updating instance_info_cache with network_info: [{"id": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "address": "fa:16:3e:b8:a1:52", "network": {"id": "0e8d0255-2c05-411c-9a9b-eb510baf6dfb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1645942138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55456ebdb4454822b357305de4ca7db7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ab93899c-92b2-4d84-95a6-192234add28c", "external-id": "nsx-vlan-transportzone-697", "segmentation_id": 697, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf1f0ce-be", "ovs_interfaceid": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.633039] env[68279]: DEBUG nova.compute.manager [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Received event network-changed-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.633507] env[68279]: DEBUG nova.compute.manager [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Refreshing instance network info cache due to event network-changed-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 922.633507] env[68279]: DEBUG oslo_concurrency.lockutils [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] Acquiring lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.655435] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.655639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.655821] env[68279]: DEBUG nova.compute.manager [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.656963] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f3b370-49d1-40c1-9126-45e4ee3436d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.665886] env[68279]: DEBUG nova.compute.manager [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 922.666564] env[68279]: DEBUG nova.objects.instance [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'flavor' on Instance uuid 02f34ac7-9deb-4714-92cb-bb507fde1e74 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.673560] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f33e91-2bf0-420a-b29b-e482ec1dd719 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.683526] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd61b39a-4c11-4abb-812e-b4325d0e5c83 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.734382] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a601c8-45d1-4e7a-ba28-daf73d537489 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.743973] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.743973] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 922.743973] env[68279]: _type = "HttpNfcLease" [ 922.743973] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 922.747819] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7373140-2a2c-403c-a9f2-7d0223a6461c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.765857] env[68279]: DEBUG nova.compute.provider_tree [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.769215] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Successfully created port: 6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.803615] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.930328] env[68279]: DEBUG oslo_vmware.api [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963293, 'name': PowerOnVM_Task, 'duration_secs': 0.642723} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.930494] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.930794] env[68279]: INFO nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Took 9.48 seconds to spawn the instance on the hypervisor. 
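The repeated 'Acquiring lock ...', 'Lock ... acquired ... waited N.NNNs' and 'Lock ... "released" ... held N.NNNs' records come from oslo.concurrency's lockutils, which Nova uses to serialize work such as resource-tracker claims, per-instance event handling and image-cache access. A minimal sketch of the two forms visible in this log follows; the lock names and critical sections are placeholders.

    # Illustrative sketch only: lock names and bodies are placeholders.
    from oslo_concurrency import lockutils

    # Context-manager form: produces the 'Acquiring lock ...' / 'Acquired lock ...'
    # / 'Releasing lock ...' entries (lockutils.py:313/316/334 above).
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # e.g. rebuild the instance network info cache

    # Decorator form: produces the '... acquired by ... :: waited N.NNNs' and
    # '... "released" by ... :: held N.NNNs' entries (lockutils.py:405/410/424 above).
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # e.g. a resource-tracker claim or usage update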
[ 922.931463] env[68279]: DEBUG nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.932104] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fc01e8-89ee-48bc-a8e2-92a57145fd2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.000477] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963295, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.130382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Releasing lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.130815] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Instance network_info: |[{"id": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "address": "fa:16:3e:b8:a1:52", "network": {"id": "0e8d0255-2c05-411c-9a9b-eb510baf6dfb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1645942138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55456ebdb4454822b357305de4ca7db7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ab93899c-92b2-4d84-95a6-192234add28c", "external-id": "nsx-vlan-transportzone-697", "segmentation_id": 697, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf1f0ce-be", "ovs_interfaceid": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.131579] env[68279]: DEBUG oslo_concurrency.lockutils [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] Acquired lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.131881] env[68279]: DEBUG nova.network.neutron [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Refreshing network info cache for port fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 923.133612] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:a1:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ab93899c-92b2-4d84-95a6-192234add28c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdf1f0ce-bee9-4281-94ed-d2d77cf2e560', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.144321] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Creating folder: Project (55456ebdb4454822b357305de4ca7db7). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.146601] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ca208bb-abf5-4e77-bd60-0aebce81b491 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.160229] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Created folder: Project (55456ebdb4454822b357305de4ca7db7) in parent group-v594445. [ 923.160229] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Creating folder: Instances. Parent ref: group-v594654. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.160229] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7efedb4-68bc-4710-8725-f4d3ae0d634c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.173765] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Created folder: Instances in parent group-v594654. [ 923.174038] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.174246] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.174461] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51865f2e-4a7e-463f-992e-c0348dd772ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.198860] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.198860] env[68279]: value = "task-2963298" [ 923.198860] env[68279]: _type = "Task" [ 923.198860] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.208898] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963298, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.244468] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 923.244468] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 923.244468] env[68279]: _type = "HttpNfcLease" [ 923.244468] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 923.244792] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 923.244792] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5235a2a2-4f5c-0bb2-a539-186bd08096b2" [ 923.244792] env[68279]: _type = "HttpNfcLease" [ 923.244792] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 923.245593] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4298396d-cbea-4675-9b72-a0c48772991d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.256789] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 923.256968] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk. 
{{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 923.328119] env[68279]: DEBUG nova.scheduler.client.report [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.340369] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a95d4cb1-0219-4d14-af91-a87b28b3a70a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.453991] env[68279]: INFO nova.compute.manager [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Took 55.46 seconds to build instance. [ 923.495917] env[68279]: DEBUG oslo_vmware.api [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963295, 'name': RemoveSnapshot_Task, 'duration_secs': 0.542755} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.496316] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 923.496554] env[68279]: INFO nova.compute.manager [None req-6039af6e-ec60-4590-959f-a2acebd9eb8c tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 18.34 seconds to snapshot the instance on the hypervisor. [ 923.580241] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.621136] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.622763] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.623022] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.623256] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.623630] env[68279]: DEBUG nova.virt.hardware [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.625392] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0753e391-97c5-4cef-857f-7d8edc7651a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.634936] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c5bad1-e70c-4c78-a83d-edc1879d7c8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.696391] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.696733] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a567113-de66-4f42-a881-a2bb35e06702 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.711327] env[68279]: DEBUG oslo_vmware.api [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 923.711327] env[68279]: value = "task-2963299" [ 923.711327] env[68279]: _type = "Task" [ 923.711327] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.728446] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963298, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.735435] env[68279]: DEBUG oslo_vmware.api [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.833812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.280s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.837352] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.794s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.838346] env[68279]: INFO nova.compute.claims [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.870398] env[68279]: INFO nova.scheduler.client.report [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance 7f54f9a6-3236-44c1-b327-1941dbfa3ff0 [ 923.958334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d539518d-1707-4afb-a31d-cf2afea8a038 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.926s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.031489] env[68279]: DEBUG nova.network.neutron [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Updated VIF entry in instance network info cache for port fdf1f0ce-bee9-4281-94ed-d2d77cf2e560. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.031870] env[68279]: DEBUG nova.network.neutron [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Updating instance_info_cache with network_info: [{"id": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "address": "fa:16:3e:b8:a1:52", "network": {"id": "0e8d0255-2c05-411c-9a9b-eb510baf6dfb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1645942138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55456ebdb4454822b357305de4ca7db7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ab93899c-92b2-4d84-95a6-192234add28c", "external-id": "nsx-vlan-transportzone-697", "segmentation_id": 697, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf1f0ce-be", "ovs_interfaceid": "fdf1f0ce-bee9-4281-94ed-d2d77cf2e560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.214992] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963298, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.226645] env[68279]: DEBUG oslo_vmware.api [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963299, 'name': PowerOffVM_Task, 'duration_secs': 0.33006} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.228753] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.228969] env[68279]: DEBUG nova.compute.manager [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.229947] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e44e2bf-64df-4ad2-aa8e-6971d7dc8be1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.381172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27096a99-eb00-4819-88c6-e5c2dac8b6c2 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "7f54f9a6-3236-44c1-b327-1941dbfa3ff0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.993s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.389388] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Successfully updated port: 6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.535507] env[68279]: DEBUG oslo_concurrency.lockutils [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] Releasing lock "refresh_cache-65688756-ad94-437f-9a36-bd7e3f7f7a2b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.535802] env[68279]: DEBUG nova.compute.manager [req-c2641f80-a8b1-452d-b926-0cc1e1134c8b req-ef5eeebf-df44-48e6-abe7-a637b3ce3a01 service nova] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Received event network-vif-deleted-d6e29e5a-01c4-4c55-bb4f-dbac8943124c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.553107] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "19f693cd-b598-432d-acf5-64da9f640d5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.553318] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
924.553499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.553876] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.554139] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.556127] env[68279]: INFO nova.compute.manager [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Terminating instance [ 924.692666] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 924.692905] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 924.694045] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07f349b-141d-4b1a-95bf-9cb4913a1978 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.702599] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 924.702820] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 924.706242] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e3502eb9-0f8f-4259-943d-39c1942bc39e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.713933] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963298, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.744206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7f2398fe-526f-4276-abbd-56de2615219c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.088s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.893392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.893392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.893911] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.945797] env[68279]: DEBUG oslo_vmware.rw_handles [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a5908-c427-39e4-7a56-9b9a9d0f88df/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 924.946056] env[68279]: INFO nova.virt.vmwareapi.images [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Downloaded image file data cfebc35c-c617-4fc2-9a37-bdc659062679 [ 924.946893] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0e46d3-8b3c-4b1d-886c-fd3c9d9aa29c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.967817] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4482efe9-5887-49cb-be70-5178cfc0a0fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.000356] env[68279]: INFO nova.virt.vmwareapi.images [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] The imported VM was unregistered [ 925.002715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 925.002947] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.003260] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b064e1e-0d2a-412c-aa3d-75fda1487cbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.015680] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.015892] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8/OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8.vmdk to [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk. 
{{(pid=68279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 925.016180] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-298339eb-fccf-40ed-bb33-d6f9ab6895ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.023834] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 925.023834] env[68279]: value = "task-2963301" [ 925.023834] env[68279]: _type = "Task" [ 925.023834] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.032569] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.051108] env[68279]: DEBUG nova.compute.manager [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Received event network-vif-plugged-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.051354] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] Acquiring lock "192734ca-f549-4461-a05a-5f00f0639977-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.051634] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] Lock "192734ca-f549-4461-a05a-5f00f0639977-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.051816] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] Lock "192734ca-f549-4461-a05a-5f00f0639977-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.052010] env[68279]: DEBUG nova.compute.manager [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] No waiting events found dispatching network-vif-plugged-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 925.052200] env[68279]: WARNING nova.compute.manager [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Received unexpected event network-vif-plugged-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d for instance with vm_state building and task_state spawning. 
[ 925.052407] env[68279]: DEBUG nova.compute.manager [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Received event network-changed-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.052556] env[68279]: DEBUG nova.compute.manager [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Refreshing instance network info cache due to event network-changed-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 925.052738] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] Acquiring lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.062495] env[68279]: DEBUG nova.compute.manager [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.062714] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.064383] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6856d5d4-cc07-440c-8399-e159e9f2c3f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.075607] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.079034] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d228a9f-e69e-43c3-b5b9-935f7bd9e6bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.087467] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 925.087467] env[68279]: value = "task-2963302" [ 925.087467] env[68279]: _type = "Task" [ 925.087467] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.097595] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.223677] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963298, 'name': CreateVM_Task, 'duration_secs': 1.643412} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.223989] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.225562] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.225562] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.226154] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.226154] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48b72dcc-4112-4bbb-b0e6-ac9036a5900a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.236646] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 925.236646] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d9714-a802-47db-320b-d543dbcb429a" [ 925.236646] env[68279]: _type = "Task" [ 925.236646] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.258142] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d9714-a802-47db-320b-d543dbcb429a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.345189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.346303] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.362802] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60ecd2d-d72e-457e-ba21-a4a02deaedb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.377793] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0b1a48-59e9-4d25-839d-6092c355b887 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.427113] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abbf7a7-e2e4-4537-b7a7-5fe686405cd8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.443073] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb55d70a-cb03-4e54-b202-22e1bf28dac8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.466537] env[68279]: DEBUG nova.compute.provider_tree [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.480729] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.544451] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.606771] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963302, 'name': PowerOffVM_Task, 'duration_secs': 0.205334} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.607171] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.607360] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.607631] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22383aeb-64e0-4588-b002-8c0ad038c2fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.699949] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.700514] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.701127] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleting the datastore file [datastore2] 19f693cd-b598-432d-acf5-64da9f640d5e {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.701228] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b59f4d1d-37dd-4f8e-a523-317f35a8b66d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.714214] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 925.714214] env[68279]: value = "task-2963304" [ 925.714214] env[68279]: _type = "Task" [ 925.714214] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.728873] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.756358] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525d9714-a802-47db-320b-d543dbcb429a, 'name': SearchDatastore_Task, 'duration_secs': 0.020501} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.756776] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.757066] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.757470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.757834] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.757834] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.758566] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e383c117-4184-4053-bb7d-1b78b1a9dc8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.770746] env[68279]: DEBUG nova.network.neutron [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 
192734ca-f549-4461-a05a-5f00f0639977] Updating instance_info_cache with network_info: [{"id": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "address": "fa:16:3e:10:f8:de", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f27a4f8-cf", "ovs_interfaceid": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.772816] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.773237] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.774175] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30fa1914-d89b-4f84-a731-91dd661509e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.790279] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 925.790279] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fea586-77d4-2f88-1dd6-8eddba047974" [ 925.790279] env[68279]: _type = "Task" [ 925.790279] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.805595] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fea586-77d4-2f88-1dd6-8eddba047974, 'name': SearchDatastore_Task, 'duration_secs': 0.014436} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.806418] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1858f6f-e23a-4d05-a47d-a46bbcec6d2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.819594] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 925.819594] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5249aa69-cd31-fcc5-a7c3-4069d19a653a" [ 925.819594] env[68279]: _type = "Task" [ 925.819594] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.836027] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5249aa69-cd31-fcc5-a7c3-4069d19a653a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.850184] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 925.971030] env[68279]: DEBUG nova.scheduler.client.report [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.048133] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.077100] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.077366] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.230197] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.273038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.273361] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Instance network_info: |[{"id": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "address": "fa:16:3e:10:f8:de", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f27a4f8-cf", "ovs_interfaceid": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.273764] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] 
Acquired lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.273959] env[68279]: DEBUG nova.network.neutron [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Refreshing network info cache for port 6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.275336] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:f8:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.283387] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.284381] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.284614] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8507bda5-381c-4817-a5e5-8dac2897ae2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.313071] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.313071] env[68279]: value = "task-2963305" [ 926.313071] env[68279]: _type = "Task" [ 926.313071] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.327114] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963305, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.334140] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5249aa69-cd31-fcc5-a7c3-4069d19a653a, 'name': SearchDatastore_Task, 'duration_secs': 0.018575} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.334459] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.334700] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 65688756-ad94-437f-9a36-bd7e3f7f7a2b/65688756-ad94-437f-9a36-bd7e3f7f7a2b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.335045] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cd2c55e-bedd-442d-b0a9-f9f87d46331a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.344027] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 926.344027] env[68279]: value = "task-2963306" [ 926.344027] env[68279]: _type = "Task" [ 926.344027] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.355594] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.369318] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.477321] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.477867] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 926.480688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.804s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.480908] env[68279]: DEBUG nova.objects.instance [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lazy-loading 'resources' on Instance uuid 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.541208] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.563074] env[68279]: INFO nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Rebuilding instance [ 926.580126] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.619378] env[68279]: DEBUG nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 926.620173] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22157271-8189-476f-a70f-03b348f480a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.730598] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.836163] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963305, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.857784] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963306, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.872384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.872384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.989431] env[68279]: DEBUG nova.compute.utils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.992972] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.992972] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.051702] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.063809] env[68279]: DEBUG nova.policy [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7760dd4f532943c8a41dd3ffec28a19a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a6f5472e1ab4b2ab95a6c8927281b59', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 927.109641] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.174382] env[68279]: DEBUG nova.network.neutron [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Updated VIF entry in instance network info cache for port 6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.174637] env[68279]: DEBUG nova.network.neutron [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Updating instance_info_cache with network_info: [{"id": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "address": "fa:16:3e:10:f8:de", "network": {"id": "001a620f-8fe1-47de-9218-89ec33e7d939", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-924392560-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91ef3e24b66c44a29463a982c192a06e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f27a4f8-cf", "ovs_interfaceid": "6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.221694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ce5bf310-724c-43ad-880a-90db6186b6b6 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "74bb7a04-c443-4724-bca0-160208cd657d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.221995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ce5bf310-724c-43ad-880a-90db6186b6b6 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "74bb7a04-c443-4724-bca0-160208cd657d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.236373] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.324852] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963305, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.359319] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963306, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582891} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.360388] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 65688756-ad94-437f-9a36-bd7e3f7f7a2b/65688756-ad94-437f-9a36-bd7e3f7f7a2b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.363030] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.363030] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d75844aa-69f1-448b-845c-941ba8caf548 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.371970] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 927.371970] env[68279]: value = "task-2963307" [ 927.371970] env[68279]: _type = "Task" [ 927.371970] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.376279] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.396836] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.464466] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Successfully created port: 3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.496267] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 927.547343] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963301, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.486555} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.547606] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8/OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8.vmdk to [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk. 
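The CopyVirtualDisk_Task and MoveVirtualDisk_Task entries around this point follow oslo.vmware's start-then-poll pattern: the driver invokes a vSphere task through the API session and then blocks in wait_for_task, which emits the "progress is N%" polling lines seen here. A minimal sketch of that pattern follows; the host name, credentials and datastore paths are placeholders rather than values from this log, and running it requires a reachable vCenter (against vCenter a sourceDatacenter/destDatacenter reference may also be needed).

    # Sketch only: start a CopyVirtualDisk_Task and wait for it, mirroring the
    # start-then-poll sequence in the log above. All connection details are
    # placeholders.
    from oslo_vmware import api

    def copy_virtual_disk(session, source_path, dest_path):
        """Start CopyVirtualDisk_Task and block until vCenter reports it done."""
        vim = session.vim
        disk_mgr = vim.service_content.virtualDiskManager
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source_path, destName=dest_path)
        # wait_for_task polls the task state (the "progress is N%" lines) and
        # raises if the task fails; on success it returns the task info.
        return session.wait_for_task(task)

    if __name__ == '__main__':
        # Placeholder credentials; constructing the session opens a real
        # connection, so this only works against a live vCenter.
        session = api.VMwareAPISession(
            'vc.example.test', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        copy_virtual_disk(
            session,
            '[datastore1] devstack-image-cache_base/image/image.vmdk',
            '[datastore1] some-instance-uuid/some-instance-uuid.vmdk')
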
[ 927.547798] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Cleaning up location [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 927.547961] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_ce256ae9-a9d1-4810-b939-e99a4f239ac8 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.548244] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ba2c142-ce4b-467f-ba56-3624c647cee5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.552482] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9609d4d-45d3-42f5-bc29-f6bea2b02e9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.556375] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 927.556375] env[68279]: value = "task-2963308" [ 927.556375] env[68279]: _type = "Task" [ 927.556375] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.563018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e19a9ef-97a9-4ca5-b779-e9c96f4e7af2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.569456] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963308, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.599405] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce08af5-3134-4d00-9ae6-4df0a4bc9873 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.610411] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d113712-a513-4a59-8e20-ed5b0f677919 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.627340] env[68279]: DEBUG nova.compute.provider_tree [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.637715] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.638497] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57c92879-16e7-4812-8926-70a87336decc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.650503] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 927.650503] env[68279]: value = "task-2963309" [ 927.650503] env[68279]: _type = "Task" [ 927.650503] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.660387] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963309, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.678435] env[68279]: DEBUG oslo_concurrency.lockutils [req-673bfc0e-360d-4922-82fb-79b01e53ab9d req-8fcabba4-de9a-474d-920b-71b1bcb37722 service nova] Releasing lock "refresh_cache-192734ca-f549-4461-a05a-5f00f0639977" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.728232] env[68279]: DEBUG nova.compute.manager [None req-ce5bf310-724c-43ad-880a-90db6186b6b6 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 74bb7a04-c443-4724-bca0-160208cd657d] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.734864] env[68279]: DEBUG oslo_vmware.api [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.730791} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.735472] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.735703] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.735914] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.736133] env[68279]: INFO nova.compute.manager [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Took 2.67 seconds to destroy the instance on the hypervisor. [ 927.736439] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.736678] env[68279]: DEBUG nova.compute.manager [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 927.736776] env[68279]: DEBUG nova.network.neutron [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.827614] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963305, 'name': CreateVM_Task, 'duration_secs': 1.115751} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.828210] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.828939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.829156] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.829499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.830271] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01fda5d4-01dc-488a-9a98-8f3024668b2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.838753] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 927.838753] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523229bd-a708-f34b-2374-e046f0521cfd" [ 927.838753] env[68279]: _type = "Task" [ 927.838753] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.848107] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523229bd-a708-f34b-2374-e046f0521cfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.886337] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094713} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.886614] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.887535] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4401c143-09f9-495f-9db0-88061cfa4854 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.916348] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 65688756-ad94-437f-9a36-bd7e3f7f7a2b/65688756-ad94-437f-9a36-bd7e3f7f7a2b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.917573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.917846] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2156994e-be4d-44d8-afcd-d4a722428ef6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.940727] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 927.940727] env[68279]: value = "task-2963310" [ 927.940727] env[68279]: _type = "Task" [ 927.940727] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.949810] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963310, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.067415] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11831} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.067659] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.067830] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.068097] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk to [datastore2] a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87/a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.068356] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e8e0a11-8431-4f0e-a839-510be73f1d5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.076844] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 928.076844] env[68279]: value = "task-2963311" [ 928.076844] env[68279]: _type = "Task" [ 928.076844] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.085614] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.131457] env[68279]: DEBUG nova.scheduler.client.report [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.162282] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 928.162558] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.163468] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530759f7-1759-4866-b49d-37901924cd24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.171537] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 928.171825] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dd920c2-33a1-4d58-9a08-1eea7f52780a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.236646] env[68279]: DEBUG nova.compute.manager [None req-ce5bf310-724c-43ad-880a-90db6186b6b6 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 74bb7a04-c443-4724-bca0-160208cd657d] Instance disappeared before build. 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 928.244997] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.245364] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.245620] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.245961] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04039590-827f-4e57-b8a7-8d3d1f8ae5ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.254054] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 928.254054] env[68279]: value = "task-2963313" [ 928.254054] env[68279]: _type = "Task" [ 928.254054] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.267039] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963313, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.350970] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523229bd-a708-f34b-2374-e046f0521cfd, 'name': SearchDatastore_Task, 'duration_secs': 0.010837} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.351391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.351571] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.351818] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.352382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.352382] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.352525] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7114829b-bdc3-4359-8c5b-50f0728b5a17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.371756] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.371972] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.372773] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7473cff6-ce3e-4770-ae1d-77c6e4e87c6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.377641] env[68279]: DEBUG nova.compute.manager [req-74394df4-c30e-4f66-aa42-8081c8959f53 req-07181859-b031-4ade-ab97-0d6225335739 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Received event network-vif-deleted-42723128-3fe2-4814-835a-5ee1b4b1d92e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 928.377830] env[68279]: INFO nova.compute.manager [req-74394df4-c30e-4f66-aa42-8081c8959f53 req-07181859-b031-4ade-ab97-0d6225335739 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Neutron deleted interface 42723128-3fe2-4814-835a-5ee1b4b1d92e; detaching it from the instance and deleting it from the info cache [ 928.378331] env[68279]: DEBUG nova.network.neutron [req-74394df4-c30e-4f66-aa42-8081c8959f53 req-07181859-b031-4ade-ab97-0d6225335739 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.384881] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 928.384881] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526376e3-3f4e-92ed-8fb3-d0f15f26f2f6" [ 928.384881] env[68279]: _type = "Task" [ 928.384881] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.399517] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526376e3-3f4e-92ed-8fb3-d0f15f26f2f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.456420] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963310, 'name': ReconfigVM_Task, 'duration_secs': 0.292698} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.456724] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 65688756-ad94-437f-9a36-bd7e3f7f7a2b/65688756-ad94-437f-9a36-bd7e3f7f7a2b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.457478] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7219d028-07dc-4903-9f5b-97bd1316eb91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.470332] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 928.470332] env[68279]: value = "task-2963314" [ 928.470332] env[68279]: _type = "Task" [ 928.470332] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.485713] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963314, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.505260] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 928.510744] env[68279]: DEBUG nova.network.neutron [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.528785] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.529070] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.529311] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.529498] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.529645] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.529792] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.530007] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.530216] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 
tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.530404] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.530569] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.530742] env[68279]: DEBUG nova.virt.hardware [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.532267] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31fa39d-4956-4a48-b935-b0b10de4fb08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.545722] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbe9bf3-b88c-45a8-b5f9-f24da41189ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.592832] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.637869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.641381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.526s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.642048] env[68279]: DEBUG nova.objects.instance [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lazy-loading 'resources' on Instance uuid 5cbe4915-5b01-4424-96c8-f3225e512c89 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.659777] env[68279]: INFO nova.scheduler.client.report [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted allocations for instance 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb [ 928.750067] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ce5bf310-724c-43ad-880a-90db6186b6b6 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "74bb7a04-c443-4724-bca0-160208cd657d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.528s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.771226] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963313, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.881446] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec59b2e0-dc6e-48f0-98bb-b9a87bf173f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.901027] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526376e3-3f4e-92ed-8fb3-d0f15f26f2f6, 'name': SearchDatastore_Task, 'duration_secs': 0.08346} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.901325] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02511178-ed09-440b-9933-ed3e14947e8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.906530] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cfee19-8e94-425f-9031-4e66729b544e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.928679] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 928.928679] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e76988-32be-5edf-ca89-fe1d35b8c2a4" [ 928.928679] env[68279]: _type = "Task" [ 928.928679] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.952874] env[68279]: DEBUG nova.compute.manager [req-74394df4-c30e-4f66-aa42-8081c8959f53 req-07181859-b031-4ade-ab97-0d6225335739 service nova] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Detach interface failed, port_id=42723128-3fe2-4814-835a-5ee1b4b1d92e, reason: Instance 19f693cd-b598-432d-acf5-64da9f640d5e could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 928.956737] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e76988-32be-5edf-ca89-fe1d35b8c2a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.986750] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963314, 'name': Rename_Task, 'duration_secs': 0.204765} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.988396] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.992024] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6da2ea74-ca38-43d8-9c46-621a3189bb16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.993789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.994078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.007085] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 929.007085] env[68279]: value = "task-2963315" [ 929.007085] env[68279]: _type = "Task" [ 929.007085] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.013589] env[68279]: INFO nova.compute.manager [-] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Took 1.28 seconds to deallocate network for instance. [ 929.022343] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.040144] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Successfully updated port: 3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.093605] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.173209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-33e141f7-bb7d-4389-89fe-8fc159d036b0 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "1d0f383f-6bf9-42d0-b6c6-1f276eb181cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.503s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.274891] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963313, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.444305] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e76988-32be-5edf-ca89-fe1d35b8c2a4, 'name': SearchDatastore_Task, 'duration_secs': 0.087684} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.445080] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.445080] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 192734ca-f549-4461-a05a-5f00f0639977/192734ca-f549-4461-a05a-5f00f0639977.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.445319] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b199a27-c309-494e-8358-d12a73087e05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.459227] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 929.459227] env[68279]: value = "task-2963316" [ 929.459227] env[68279]: _type = "Task" [ 929.459227] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.476270] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963316, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.497561] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.524912] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.524912] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963315, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.546774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.546945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquired lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.547230] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.598299] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.661369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30c2231-aaa9-4a0f-a358-92f7f9ed7a47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.676369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ae6d69-6fab-4fea-836c-a38f68f29c0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.681177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "50e08259-7915-49bb-b137-5cc6e9d53c16" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.681450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.681664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.681933] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.682202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.684997] env[68279]: INFO nova.compute.manager [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Terminating instance [ 929.722110] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9197ba-ba78-4c97-90c4-30267960a7c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.736433] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37aeb5a0-11a5-44b5-869d-2cc618ca2de5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.756578] env[68279]: DEBUG nova.compute.provider_tree [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.773362] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963313, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.976527] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.022162] env[68279]: DEBUG oslo_vmware.api [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963315, 'name': PowerOnVM_Task, 'duration_secs': 0.738856} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.023220] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.023497] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.023700] env[68279]: INFO nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Took 11.23 seconds to spawn the instance on the hypervisor. 
[ 930.023875] env[68279]: DEBUG nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.024697] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7144b9f5-795a-47fd-9644-a8673045bbf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.095184] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.096215] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.191354] env[68279]: DEBUG nova.compute.manager [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.191530] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.192463] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4e5f0d-4a17-4bec-b2df-e8fdfc315453 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.205487] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.205838] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b4744b7-b129-4c08-9564-8b5bc6ebf3bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.221183] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 930.221183] env[68279]: value = "task-2963317" [ 930.221183] env[68279]: _type = "Task" [ 930.221183] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.235510] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.245228] env[68279]: DEBUG nova.network.neutron [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Updating instance_info_cache with network_info: [{"id": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "address": "fa:16:3e:27:48:99", "network": {"id": "4c637790-40fd-487d-996f-c12eda78c7db", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2010423208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a6f5472e1ab4b2ab95a6c8927281b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5c5af3-27", "ovs_interfaceid": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.262373] env[68279]: DEBUG nova.scheduler.client.report [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.275841] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.838353} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.276136] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 930.276336] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 930.276621] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.471946] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.547171] env[68279]: INFO nova.compute.manager [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Took 54.59 seconds to build instance. 
[ 930.564869] env[68279]: DEBUG nova.compute.manager [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Received event network-vif-plugged-3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.565164] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Acquiring lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.565378] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.565544] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.565711] env[68279]: DEBUG nova.compute.manager [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] No waiting events found dispatching network-vif-plugged-3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 930.565870] env[68279]: WARNING nova.compute.manager [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Received unexpected event network-vif-plugged-3b5c5af3-274c-4169-8f68-a3210a3cdf80 for instance with vm_state building and task_state spawning. [ 930.566040] env[68279]: DEBUG nova.compute.manager [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Received event network-changed-3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.566196] env[68279]: DEBUG nova.compute.manager [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Refreshing instance network info cache due to event network-changed-3b5c5af3-274c-4169-8f68-a3210a3cdf80. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.566440] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Acquiring lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.590218] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963311, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.457296} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.590430] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfebc35c-c617-4fc2-9a37-bdc659062679/cfebc35c-c617-4fc2-9a37-bdc659062679.vmdk to [datastore2] a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87/a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.591223] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd3da61-a456-4e93-a8b2-eedcc866eb44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.614901] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87/a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.615194] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c0f020-c6f3-4229-88ea-21c88c709b6c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.634883] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 930.634883] env[68279]: value = "task-2963318" [ 930.634883] env[68279]: _type = "Task" [ 930.634883] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.645036] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963318, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.732342] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963317, 'name': PowerOffVM_Task, 'duration_secs': 0.337916} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.732630] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.732800] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.733071] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62626e97-417a-4bd0-8e74-3e70a57a128c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.749135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Releasing lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.749135] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Instance network_info: |[{"id": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "address": "fa:16:3e:27:48:99", "network": {"id": "4c637790-40fd-487d-996f-c12eda78c7db", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2010423208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a6f5472e1ab4b2ab95a6c8927281b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5c5af3-27", "ovs_interfaceid": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.749135] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Acquired lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.749135] env[68279]: DEBUG nova.network.neutron [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Refreshing network info cache 
for port 3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.751071] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:48:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f35e69ef-c2c8-4b8c-9887-33e97b242c0a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b5c5af3-274c-4169-8f68-a3210a3cdf80', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.758929] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Creating folder: Project (8a6f5472e1ab4b2ab95a6c8927281b59). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.761863] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3145802d-6c13-4b2f-8fe4-c2ede9f82d5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.767874] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.127s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.770314] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.510s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.774841] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Created folder: Project (8a6f5472e1ab4b2ab95a6c8927281b59) in parent group-v594445. [ 930.774841] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Creating folder: Instances. Parent ref: group-v594658. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.774841] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4659155c-a982-425d-b95e-220926435f64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.785478] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Created folder: Instances in parent group-v594658. 
[ 930.785732] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.786253] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.786463] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-529a01b7-8ea4-42d0-aabe-12e6554f7a9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.806575] env[68279]: INFO nova.scheduler.client.report [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Deleted allocations for instance 5cbe4915-5b01-4424-96c8-f3225e512c89 [ 930.810061] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.810290] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.810431] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleting the datastore file [datastore2] 50e08259-7915-49bb-b137-5cc6e9d53c16 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.816019] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca1c3a44-1c05-4a31-94a3-c92afd9f8dcd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.821444] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.821444] env[68279]: value = "task-2963322" [ 930.821444] env[68279]: _type = "Task" [ 930.821444] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.822949] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for the task: (returnval){ [ 930.822949] env[68279]: value = "task-2963323" [ 930.822949] env[68279]: _type = "Task" [ 930.822949] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.835447] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.838961] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963322, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.977539] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963316, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.049725] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8dacca5e-1689-4690-be66-336406079386 tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.103s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.051033] env[68279]: DEBUG nova.network.neutron [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Updated VIF entry in instance network info cache for port 3b5c5af3-274c-4169-8f68-a3210a3cdf80. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.051407] env[68279]: DEBUG nova.network.neutron [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Updating instance_info_cache with network_info: [{"id": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "address": "fa:16:3e:27:48:99", "network": {"id": "4c637790-40fd-487d-996f-c12eda78c7db", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2010423208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a6f5472e1ab4b2ab95a6c8927281b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5c5af3-27", "ovs_interfaceid": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.147639] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.275562] env[68279]: INFO nova.compute.claims [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.315291] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 931.315569] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.315794] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 931.315949] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.316113] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 931.316269] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 931.316481] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 931.316641] env[68279]: 
DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 931.316813] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 931.317032] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 931.317184] env[68279]: DEBUG nova.virt.hardware [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 931.321771] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a43312d-308c-4106-8c1f-c9a8d10e7a03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.323890] env[68279]: DEBUG oslo_concurrency.lockutils [None req-87c76e09-425e-44ed-a522-1f931aa70ad2 tempest-ServerGroupTestJSON-1076332922 tempest-ServerGroupTestJSON-1076332922-project-member] Lock "5cbe4915-5b01-4424-96c8-f3225e512c89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.964s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.342429] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c74964-4e90-40c8-b944-c6d74fc97f91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.346343] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963322, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.350142] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963323, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.360947] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:a9:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81c058ad-3832-478e-b2c5-f65692f52164', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.368271] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.368796] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.369024] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f58d8c6-e07c-4e05-b9cc-902cf7441e9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.390123] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.390123] env[68279]: value = "task-2963324" [ 931.390123] env[68279]: _type = "Task" [ 931.390123] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.399803] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963324, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.419800] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.420078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.420340] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.420540] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.420710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.423138] env[68279]: INFO nova.compute.manager [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Terminating instance [ 931.475056] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963316, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.795968} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.475056] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 192734ca-f549-4461-a05a-5f00f0639977/192734ca-f549-4461-a05a-5f00f0639977.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.475398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.475445] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76f7fdf1-c33d-4fd5-812e-b5e9f84b0493 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.485504] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 931.485504] env[68279]: value = "task-2963325" [ 931.485504] env[68279]: _type = "Task" [ 931.485504] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.494630] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.554139] env[68279]: DEBUG oslo_concurrency.lockutils [req-c65b97dd-962a-4ecd-a627-12cc6805c3b0 req-4b6bfb92-e2f6-4d2a-9392-95296f8132fb service nova] Releasing lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.648076] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.782818] env[68279]: INFO nova.compute.resource_tracker [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating resource usage from migration c434e3c1-7ad3-4c78-887a-0bc29e467e06 [ 931.836670] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963322, 'name': CreateVM_Task, 'duration_secs': 0.565504} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.839959] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.840923] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.841043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.841367] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 931.841640] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54430c41-21dc-42e9-9312-67eaa4877747 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.847175] env[68279]: DEBUG oslo_vmware.api [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Task: {'id': task-2963323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.803563} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.848048] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.848048] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.848309] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.848309] env[68279]: INFO nova.compute.manager [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Took 1.66 seconds to destroy the instance on the hypervisor. [ 931.848675] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.848757] env[68279]: DEBUG nova.compute.manager [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.848904] env[68279]: DEBUG nova.network.neutron [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.852161] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 931.852161] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52039077-786c-7115-d4cd-79eb0e9906d6" [ 931.852161] env[68279]: _type = "Task" [ 931.852161] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.864462] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52039077-786c-7115-d4cd-79eb0e9906d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.903088] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963324, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.928761] env[68279]: DEBUG nova.compute.manager [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 931.929050] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.932646] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2862057a-9208-49b5-b5ed-1ff0560c31d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.945280] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.947202] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea2c3ac1-178f-4fae-8f24-3f3fd6ae43b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.955071] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 931.955071] env[68279]: value = "task-2963326" [ 931.955071] env[68279]: _type = "Task" [ 931.955071] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.964726] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963326, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.996841] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069242} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.997118] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.998084] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311dd044-818e-4e18-9dcf-44a58a00c8cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.023518] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 192734ca-f549-4461-a05a-5f00f0639977/192734ca-f549-4461-a05a-5f00f0639977.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.026350] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa5bc506-8165-49af-84c8-40f96cd94979 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.047484] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 932.047484] env[68279]: value = "task-2963327" [ 932.047484] env[68279]: _type = "Task" [ 932.047484] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.057831] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.148282] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963318, 'name': ReconfigVM_Task, 'duration_secs': 1.307327} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.151442] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Reconfigured VM instance instance-00000048 to attach disk [datastore2] a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87/a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.152697] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa382fc3-4e4f-4ec2-8777-a4abc4b14aaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.161530] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 932.161530] env[68279]: value = "task-2963328" [ 932.161530] env[68279]: _type = "Task" [ 932.161530] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.178960] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963328, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.184243] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.184535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.184744] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.185040] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
932.185332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.187418] env[68279]: INFO nova.compute.manager [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Terminating instance [ 932.272781] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bf6f23-fe4b-4ed4-bbeb-0ee70476c4d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.282176] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce4b667-51fe-4bcb-8a70-a27f68a86974 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.321215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4d9451-5b5f-408a-a4ff-06ccd011933d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.330733] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbeef8bf-f4d4-4c42-b727-8ad8d989012e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.346878] env[68279]: DEBUG nova.compute.provider_tree [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.368159] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52039077-786c-7115-d4cd-79eb0e9906d6, 'name': SearchDatastore_Task, 'duration_secs': 0.015848} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.368472] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.368709] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.369051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.369162] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.369318] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.369595] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74ac92f8-1a50-41e4-989a-64931efff3c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.379698] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.380132] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.380681] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-135dad2e-43e7-4313-ad13-77c9782d422b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.387034] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 932.387034] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5255d79d-835e-ae55-c307-2f5bedf141b9" [ 932.387034] env[68279]: _type = "Task" [ 932.387034] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.396733] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5255d79d-835e-ae55-c307-2f5bedf141b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.405474] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963324, 'name': CreateVM_Task, 'duration_secs': 0.530188} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.405632] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.406342] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.406645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.406893] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 932.407172] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b7ae896-e7b8-4e91-b7b7-96d13db8a59b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.412817] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 
932.412817] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4cad1-69ce-6e19-d18f-b42e024d752b" [ 932.412817] env[68279]: _type = "Task" [ 932.412817] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.421348] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4cad1-69ce-6e19-d18f-b42e024d752b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.465460] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963326, 'name': PowerOffVM_Task, 'duration_secs': 0.384846} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.465737] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.466209] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.466209] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79232597-34e2-440b-bc24-c34eecd7f58d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.558670] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963327, 'name': ReconfigVM_Task, 'duration_secs': 0.466191} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.560016] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 192734ca-f549-4461-a05a-5f00f0639977/192734ca-f549-4461-a05a-5f00f0639977.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.560016] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfe3064f-57ab-4f76-9c2c-53a832b4cc96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.568175] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 932.568175] env[68279]: value = "task-2963330" [ 932.568175] env[68279]: _type = "Task" [ 932.568175] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.580598] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963330, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.593316] env[68279]: DEBUG nova.compute.manager [req-1d6d01f2-6a65-4301-bd7a-9749a39f7a68 req-acf98c7d-9029-4e46-88f5-4f9b179b728f service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Received event network-vif-deleted-ab5d6d31-7d88-47ee-a53a-80e39c3e2a72 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.593519] env[68279]: INFO nova.compute.manager [req-1d6d01f2-6a65-4301-bd7a-9749a39f7a68 req-acf98c7d-9029-4e46-88f5-4f9b179b728f service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Neutron deleted interface ab5d6d31-7d88-47ee-a53a-80e39c3e2a72; detaching it from the instance and deleting it from the info cache [ 932.593800] env[68279]: DEBUG nova.network.neutron [req-1d6d01f2-6a65-4301-bd7a-9749a39f7a68 req-acf98c7d-9029-4e46-88f5-4f9b179b728f service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.617931] env[68279]: DEBUG nova.network.neutron [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.676198] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963328, 'name': Rename_Task, 'duration_secs': 0.230552} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.676198] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.676451] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75453b97-02eb-409c-afa4-aa7d10e34cde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.685051] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 932.685051] env[68279]: value = "task-2963331" [ 932.685051] env[68279]: _type = "Task" [ 932.685051] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.694577] env[68279]: DEBUG nova.compute.manager [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.694802] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.695154] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.695988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee5b0b4-70f7-4259-bbee-10b7a01089c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.704202] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.704501] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea1b5220-09dc-43ca-a9fd-931c00c0a86e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.713586] env[68279]: DEBUG oslo_vmware.api [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 932.713586] env[68279]: value = "task-2963332" [ 932.713586] env[68279]: _type = "Task" [ 932.713586] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.725461] env[68279]: DEBUG oslo_vmware.api [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.851331] env[68279]: DEBUG nova.scheduler.client.report [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.903288] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5255d79d-835e-ae55-c307-2f5bedf141b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012841} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.904223] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-559cbae9-85ef-4ea3-a7ba-f114ec3a9994 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.913176] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 932.913176] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f75669-d17f-bad0-b3e9-11086dc00432" [ 932.913176] env[68279]: _type = "Task" [ 932.913176] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.926070] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f75669-d17f-bad0-b3e9-11086dc00432, 'name': SearchDatastore_Task, 'duration_secs': 0.011771} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.931501] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.931793] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 2cdd785d-6758-469f-b1f6-266154853f8c/2cdd785d-6758-469f-b1f6-266154853f8c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.932122] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b4cad1-69ce-6e19-d18f-b42e024d752b, 'name': SearchDatastore_Task, 'duration_secs': 0.013435} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.932343] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5735f654-2f16-4450-8447-ae58614a0197 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.934665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.934889] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.935137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.935430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.935610] env[68279]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.935890] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9017bb88-9461-4955-a19f-9fbba41772b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.945702] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 932.945702] env[68279]: value = "task-2963333" [ 932.945702] env[68279]: _type = "Task" [ 932.945702] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.956189] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.959266] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.959575] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.960664] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98a8f96e-00dd-46e8-82d8-5e1e237b4d29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.967796] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 932.967796] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52130dc3-3693-68f2-e95b-7bfa9f52f08d" [ 932.967796] env[68279]: _type = "Task" [ 932.967796] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.980673] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52130dc3-3693-68f2-e95b-7bfa9f52f08d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.079035] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963330, 'name': Rename_Task, 'duration_secs': 0.146531} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.079035] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.079225] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24584595-961e-4211-84f7-46e3a44b7b99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.086746] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 933.086746] env[68279]: value = "task-2963334" [ 933.086746] env[68279]: _type = "Task" [ 933.086746] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.099580] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963334, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.099879] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e546e68-94be-4a4a-8a9a-a8d30af5e702 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.109881] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68dc646-58d2-4d57-8f29-55cb11e6eb4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.121534] env[68279]: INFO nova.compute.manager [-] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Took 1.27 seconds to deallocate network for instance. [ 933.156416] env[68279]: DEBUG nova.compute.manager [req-1d6d01f2-6a65-4301-bd7a-9749a39f7a68 req-acf98c7d-9029-4e46-88f5-4f9b179b728f service nova] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Detach interface failed, port_id=ab5d6d31-7d88-47ee-a53a-80e39c3e2a72, reason: Instance 50e08259-7915-49bb-b137-5cc6e9d53c16 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 933.196269] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963331, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.225534] env[68279]: DEBUG oslo_vmware.api [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963332, 'name': PowerOffVM_Task, 'duration_secs': 0.26727} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.225594] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.225806] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.226145] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6126a2eb-021f-480a-8e98-3422ba17d3a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.302973] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.302973] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.302973] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleting the datastore file [datastore1] 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.304483] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60241ab4-08ee-49b3-89d9-7aab7103f09c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.307411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.307688] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Deleting contents of 
the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.307949] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Deleting the datastore file [datastore1] 65688756-ad94-437f-9a36-bd7e3f7f7a2b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.308292] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-409730be-7adc-420e-b1db-51067260c4d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.316724] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 933.316724] env[68279]: value = "task-2963336" [ 933.316724] env[68279]: _type = "Task" [ 933.316724] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.318512] env[68279]: DEBUG oslo_vmware.api [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for the task: (returnval){ [ 933.318512] env[68279]: value = "task-2963337" [ 933.318512] env[68279]: _type = "Task" [ 933.318512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.333875] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.358036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.588s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.358265] env[68279]: INFO nova.compute.manager [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Migrating [ 933.365576] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.655s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.365797] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.368493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.439s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.371032] env[68279]: INFO nova.compute.claims [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.407582] env[68279]: INFO nova.scheduler.client.report [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleted allocations for instance 665d932d-1068-4bb2-835c-2184a80753d1 [ 933.459930] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963333, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.480493] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52130dc3-3693-68f2-e95b-7bfa9f52f08d, 'name': SearchDatastore_Task, 'duration_secs': 0.021049} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.481385] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86491855-c7d9-4099-9d12-001fc8e0af62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.488292] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 933.488292] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5224b763-38d1-0bc6-cfa8-fee866462589" [ 933.488292] env[68279]: _type = "Task" [ 933.488292] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.498504] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5224b763-38d1-0bc6-cfa8-fee866462589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.597351] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963334, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.629185] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.698348] env[68279]: DEBUG oslo_vmware.api [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963331, 'name': PowerOnVM_Task, 'duration_secs': 0.577157} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.698630] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.698821] env[68279]: INFO nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Took 17.50 seconds to spawn the instance on the hypervisor. 
[ 933.699016] env[68279]: DEBUG nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.699888] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d292799f-c16d-40e9-b5e9-847f09b2523b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.835796] env[68279]: DEBUG oslo_vmware.api [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199758} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.836489] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.836694] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.836918] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.837303] env[68279]: INFO nova.compute.manager [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Took 1.91 seconds to destroy the instance on the hypervisor. [ 933.837435] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.837742] env[68279]: DEBUG nova.compute.manager [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.837742] env[68279]: DEBUG nova.network.neutron [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.844126] env[68279]: DEBUG oslo_vmware.api [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Task: {'id': task-2963337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202935} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.844705] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.844902] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.845194] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.845290] env[68279]: INFO nova.compute.manager [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 933.845569] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.845774] env[68279]: DEBUG nova.compute.manager [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.845860] env[68279]: DEBUG nova.network.neutron [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.886525] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.886717] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.886811] env[68279]: DEBUG nova.network.neutron [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.918413] env[68279]: DEBUG oslo_concurrency.lockutils [None req-35b0075e-ced2-435c-80e0-4bf5b4fc6602 tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "665d932d-1068-4bb2-835c-2184a80753d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.901s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.958507] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963333, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.000982] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5224b763-38d1-0bc6-cfa8-fee866462589, 'name': SearchDatastore_Task, 'duration_secs': 0.012184} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.003314] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.003665] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.003950] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27bc1a78-9207-4430-bb28-0cf258d9ae02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.015540] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 934.015540] env[68279]: value = "task-2963338" [ 934.015540] env[68279]: _type = "Task" [ 934.015540] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.027782] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.099387] env[68279]: DEBUG oslo_vmware.api [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963334, 'name': PowerOnVM_Task, 'duration_secs': 0.730996} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.099704] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.099914] env[68279]: INFO nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Took 10.52 seconds to spawn the instance on the hypervisor. 
[ 934.100119] env[68279]: DEBUG nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.101091] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b057ec8-21b7-4d89-afbf-f788c69e97d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.220252] env[68279]: INFO nova.compute.manager [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Took 62.09 seconds to build instance. [ 934.459653] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963333, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.528126] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.624727] env[68279]: INFO nova.compute.manager [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Took 35.02 seconds to build instance. 
[ 934.722479] env[68279]: DEBUG oslo_concurrency.lockutils [None req-145131c1-3170-48ac-b2b8-ca6c88e8ad98 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.603s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.755897] env[68279]: DEBUG nova.network.neutron [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.783883] env[68279]: DEBUG nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Received event network-vif-deleted-fdf1f0ce-bee9-4281-94ed-d2d77cf2e560 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.784323] env[68279]: INFO nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Neutron deleted interface fdf1f0ce-bee9-4281-94ed-d2d77cf2e560; detaching it from the instance and deleting it from the info cache [ 934.784575] env[68279]: DEBUG nova.network.neutron [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.854741] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a9790-eff3-4ecb-bfb6-71b1810426b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.867108] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa21329b-8ae2-431f-9ee0-0eb8f1b19139 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.907699] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92a47a2-f36c-4666-b9dd-4fe725ed9a89 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.917729] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a004ff68-fab7-4e16-9bce-0ba347e49f7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.935556] env[68279]: DEBUG nova.compute.provider_tree [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.937091] env[68279]: DEBUG nova.network.neutron [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.959540] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963333, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.638103} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.960087] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 2cdd785d-6758-469f-b1f6-266154853f8c/2cdd785d-6758-469f-b1f6-266154853f8c.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 934.960370] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.961120] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f290e20-d2db-43ba-8f4f-3acc58c75346 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.969439] env[68279]: DEBUG nova.network.neutron [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.971910] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 934.971910] env[68279]: value = "task-2963339" [ 934.971910] env[68279]: _type = "Task" [ 934.971910] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.983732] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963339, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.034756] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963338, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.079831] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "192734ca-f549-4461-a05a-5f00f0639977" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.130824] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86729401-b0c3-4e18-b07b-ddacbb461a16 tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.545s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.131016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.051s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.131260] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "192734ca-f549-4461-a05a-5f00f0639977-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.131501] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.131701] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.134145] env[68279]: INFO nova.compute.manager [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Terminating instance [ 935.228747] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.229036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.229317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.229507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.229678] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.232288] env[68279]: INFO nova.compute.manager [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Terminating instance [ 935.258463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.292017] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-088464cc-bc28-4a55-963c-9f7b5c980f83 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.303035] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f042990-a935-4d4b-9bb5-a4f1b07c922b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.339434] env[68279]: DEBUG nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Detach interface failed, port_id=fdf1f0ce-bee9-4281-94ed-d2d77cf2e560, reason: Instance 65688756-ad94-437f-9a36-bd7e3f7f7a2b could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 935.339672] env[68279]: DEBUG nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Received event network-vif-deleted-df5903a9-63d8-44d6-8066-6790510ea180 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.339845] env[68279]: INFO nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Neutron deleted interface df5903a9-63d8-44d6-8066-6790510ea180; detaching it from the instance and deleting it from the info cache [ 935.340024] env[68279]: DEBUG nova.network.neutron [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.439803] env[68279]: DEBUG nova.scheduler.client.report [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.445245] env[68279]: INFO nova.compute.manager [-] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Took 1.60 seconds to deallocate network for instance. [ 935.474200] env[68279]: INFO nova.compute.manager [-] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Took 1.64 seconds to deallocate network for instance. [ 935.488480] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963339, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.320924} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.488740] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.489582] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df415bee-530e-45f4-9f85-f8764336b614 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.514010] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 2cdd785d-6758-469f-b1f6-266154853f8c/2cdd785d-6758-469f-b1f6-266154853f8c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.514893] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da141db9-bdf6-4b2d-b6e0-3352f7151101 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.529777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.530096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.530223] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.530404] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.530570] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.535804] env[68279]: INFO nova.compute.manager [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Terminating instance [ 935.543175] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963338, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.175993} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.544990] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.544990] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.545266] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 935.545266] env[68279]: value = "task-2963340" [ 935.545266] env[68279]: _type = "Task" [ 935.545266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.546026] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-733ce9b0-0b61-47b6-902f-26b3c35ccaed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.557028] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963340, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.558341] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 935.558341] env[68279]: value = "task-2963341" [ 935.558341] env[68279]: _type = "Task" [ 935.558341] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.567869] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963341, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.637966] env[68279]: DEBUG nova.compute.manager [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.638212] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.639169] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4640ab1-f434-4619-986a-cb8a90d1027a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.647097] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.647362] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac29f278-0cfe-4c90-8fc3-516b8da3202e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.654549] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 935.654549] env[68279]: value = "task-2963342" [ 935.654549] env[68279]: _type = "Task" [ 935.654549] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.663231] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.739894] env[68279]: DEBUG nova.compute.manager [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.739894] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.739894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9ff554-0b1e-42f4-8529-5517042c1e48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.748917] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.749525] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d08f761-5132-463c-90c0-51f965d76ec1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.756234] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 935.756234] env[68279]: value = "task-2963343" [ 935.756234] env[68279]: _type = "Task" [ 935.756234] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.769618] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.842868] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c0702cb-c4f2-4a02-bf22-0ee6dd376e55 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.856542] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ee143a-f597-4a3b-857b-3ea0d3acae07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.913914] env[68279]: DEBUG nova.compute.manager [req-65f7b302-2f3d-4da0-96a9-39b1599448b1 req-6e9eebc3-bc51-4349-add3-dbfd7d9f2572 service nova] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Detach interface failed, port_id=df5903a9-63d8-44d6-8066-6790510ea180, reason: Instance 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 935.947069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.947714] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.951724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.101s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.952065] env[68279]: DEBUG nova.objects.instance [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'pci_requests' on Instance uuid eccc5882-2c8b-456d-bbd2-d9ed22777a77 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.953964] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.984256] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.039425] env[68279]: DEBUG nova.compute.manager [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.039719] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.040667] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d43dc1-d0d5-4afd-94ce-a3ea183d4c2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.049010] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.053235] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84de4955-b2b0-4b71-9d7c-d3d87ca2d64f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.060998] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963340, 'name': ReconfigVM_Task, 'duration_secs': 0.364737} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.065334] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 2cdd785d-6758-469f-b1f6-266154853f8c/2cdd785d-6758-469f-b1f6-266154853f8c.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.066098] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 936.066098] env[68279]: value = "task-2963344" [ 936.066098] env[68279]: _type = "Task" [ 936.066098] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.066630] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2285db7-1601-46d0-9788-3fc5e5420f84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.075311] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963341, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071768} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.075960] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.076775] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26243c3d-4a93-4ec4-929b-84ec269bb35e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.082500] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.084167] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 936.084167] env[68279]: value = "task-2963345" [ 936.084167] env[68279]: _type = "Task" [ 936.084167] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.104720] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.105595] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d346d185-7400-4367-838a-9848fe54c1d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.124495] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963345, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.130716] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 936.130716] env[68279]: value = "task-2963346" [ 936.130716] env[68279]: _type = "Task" [ 936.130716] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.144015] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963346, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.165292] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963342, 'name': PowerOffVM_Task, 'duration_secs': 0.218133} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.165582] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.165753] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.166035] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4405a539-ed5b-4e27-9ed2-4859bf6efd07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.230804] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.231269] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.231364] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleting the datastore file [datastore2] 192734ca-f549-4461-a05a-5f00f0639977 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.231639] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac2db29f-32b2-4969-b7ee-4c67547be957 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.240736] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for the task: (returnval){ [ 936.240736] env[68279]: value = "task-2963348" [ 936.240736] env[68279]: _type = "Task" [ 936.240736] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.249997] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.266223] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963343, 'name': PowerOffVM_Task, 'duration_secs': 0.23255} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.266515] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.266701] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.266974] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d17d532-1b50-497d-8a9c-bbf546781e72 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.334686] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.334935] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.335165] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleting the datastore file [datastore2] 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.335454] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f376404-2fa5-403a-8092-e2263be72ef9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.342373] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for the task: (returnval){ [ 936.342373] env[68279]: value = "task-2963350" [ 936.342373] env[68279]: _type = "Task" [ 936.342373] env[68279]: } to 
complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.350802] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963350, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.459761] env[68279]: DEBUG nova.objects.instance [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'numa_topology' on Instance uuid eccc5882-2c8b-456d-bbd2-d9ed22777a77 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.461821] env[68279]: DEBUG nova.compute.utils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 936.463921] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 936.463921] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.525927] env[68279]: DEBUG nova.policy [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 936.582686] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963344, 'name': PowerOffVM_Task, 'duration_secs': 0.193494} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.583268] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.583697] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.588041] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-629df4d8-bb4f-4b64-b99e-28234dba4162 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.596052] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963345, 'name': Rename_Task, 'duration_secs': 0.154022} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.599474] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.599474] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a717b2d1-67fb-461d-a1d3-3b0f6bf16eb4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.605515] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 936.605515] env[68279]: value = "task-2963352" [ 936.605515] env[68279]: _type = "Task" [ 936.605515] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.615711] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.644206] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963346, 'name': ReconfigVM_Task, 'duration_secs': 0.333227} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.644206] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74/02f34ac7-9deb-4714-92cb-bb507fde1e74.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.644206] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afb5fed7-0bce-4233-af28-de5e833a6ca9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.655560] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 936.655560] env[68279]: value = "task-2963353" [ 936.655560] env[68279]: _type = "Task" [ 936.655560] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.661651] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.664028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.664028] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore2] a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.664028] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43fed12b-acef-4f8b-b03e-f99ce551551a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.672361] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963353, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.673988] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 936.673988] env[68279]: value = "task-2963354" [ 936.673988] env[68279]: _type = "Task" [ 936.673988] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.682562] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.752082] env[68279]: DEBUG oslo_vmware.api [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Task: {'id': task-2963348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207463} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.752357] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.752542] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.752717] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.752888] env[68279]: INFO nova.compute.manager [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Took 1.11 seconds to destroy the instance on the hypervisor. [ 936.753147] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.753345] env[68279]: DEBUG nova.compute.manager [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.753437] env[68279]: DEBUG nova.network.neutron [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.778815] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2b1f35-d923-4a24-8fdd-39216ce3830a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.802095] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 936.858989] env[68279]: DEBUG oslo_vmware.api [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Task: {'id': task-2963350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180707} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.860244] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.860244] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.860244] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.860244] env[68279]: INFO nova.compute.manager [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 936.860412] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.860539] env[68279]: DEBUG nova.compute.manager [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.860635] env[68279]: DEBUG nova.network.neutron [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.873829] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Successfully created port: aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.967032] env[68279]: INFO nova.compute.claims [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.968104] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 937.122748] env[68279]: DEBUG oslo_vmware.api [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963352, 'name': PowerOnVM_Task, 'duration_secs': 0.51312} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.123704] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.123926] env[68279]: INFO nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Took 8.62 seconds to spawn the instance on the hypervisor. 
[ 937.124123] env[68279]: DEBUG nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.129760] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66781e-7cb3-4750-bf64-f4a5f1aeac60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.145701] env[68279]: DEBUG nova.compute.manager [req-94f8e80b-04e5-44c9-af5f-a337cacf4919 req-be8c5dad-394e-4248-9ae7-fe39b78705bf service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Received event network-vif-deleted-6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.145701] env[68279]: INFO nova.compute.manager [req-94f8e80b-04e5-44c9-af5f-a337cacf4919 req-be8c5dad-394e-4248-9ae7-fe39b78705bf service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Neutron deleted interface 6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d; detaching it from the instance and deleting it from the info cache [ 937.145701] env[68279]: DEBUG nova.network.neutron [req-94f8e80b-04e5-44c9-af5f-a337cacf4919 req-be8c5dad-394e-4248-9ae7-fe39b78705bf service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.169801] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963353, 'name': Rename_Task, 'duration_secs': 0.171625} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.170077] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.172016] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fea5ba6-01c8-4e01-80d6-39e86b5df40d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.180379] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 937.180379] env[68279]: value = "task-2963355" [ 937.180379] env[68279]: _type = "Task" [ 937.180379] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.191247] env[68279]: DEBUG oslo_vmware.api [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178158} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.191247] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.191247] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.191247] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.191247] env[68279]: INFO nova.compute.manager [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Took 1.15 seconds to destroy the instance on the hypervisor. [ 937.191247] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.191247] env[68279]: DEBUG nova.compute.manager [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.191247] env[68279]: DEBUG nova.network.neutron [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.195573] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963355, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.309415] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.309772] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45e2eb69-57c5-405d-9cbd-0a998fbe578e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.318246] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 937.318246] env[68279]: value = "task-2963356" [ 937.318246] env[68279]: _type = "Task" [ 937.318246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.327083] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.630413] env[68279]: DEBUG nova.network.neutron [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.646291] env[68279]: INFO nova.compute.manager [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Took 31.63 seconds to build instance. [ 937.648028] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c0cc5e8-42af-486d-90e6-b1fac2475f96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.661432] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c460cbe-054e-4425-bbf0-3423296185b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.690904] env[68279]: DEBUG nova.network.neutron [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.692763] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963355, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.710417] env[68279]: DEBUG nova.compute.manager [req-94f8e80b-04e5-44c9-af5f-a337cacf4919 req-be8c5dad-394e-4248-9ae7-fe39b78705bf service nova] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Detach interface failed, port_id=6f27a4f8-cf10-4513-b0f7-5fb41b5fff5d, reason: Instance 192734ca-f549-4461-a05a-5f00f0639977 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 937.711154] env[68279]: INFO nova.compute.manager [-] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Took 0.85 seconds to deallocate network for instance. [ 937.828267] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963356, 'name': PowerOffVM_Task, 'duration_secs': 0.217793} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.828586] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.828863] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 937.926820] env[68279]: DEBUG nova.network.neutron [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.983009] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 938.005481] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d4f7bc-9355-44d5-baea-e63610b954d7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.010475] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.010793] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.011029] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.011296] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.011511] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.011725] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.012016] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.012274] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.012515] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.012754] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.012995] env[68279]: DEBUG nova.virt.hardware [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.014140] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e9a383-fdee-4fc6-9b54-c0df6da871ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.024065] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc551ffb-40a1-4c46-b160-0d3eeae36710 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.028716] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f34f80-a719-4af6-822e-809cd8d0849f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.067350] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d47cc4-b65f-427a-b0dd-00a47cc62f63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.075139] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be89a2b-cbf4-4e10-92bf-9d11e91f1374 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.089272] env[68279]: DEBUG nova.compute.provider_tree [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.134149] env[68279]: INFO nova.compute.manager [-] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Took 1.38 seconds to deallocate network for instance. 
[ 938.149017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4d12718d-4d44-4a3e-9e5f-9d5a6dabbfb4 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.138s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.191860] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963355, 'name': PowerOnVM_Task, 'duration_secs': 0.536468} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.192216] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.192459] env[68279]: DEBUG nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.193257] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae76a1ca-2da2-4929-9376-21c32daf3929 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.219039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.226343] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "2cdd785d-6758-469f-b1f6-266154853f8c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.226584] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.227299] env[68279]: INFO nova.compute.manager [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Rebooting instance [ 938.335682] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 
tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.335948] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.336064] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.336264] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.336411] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.336557] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.336929] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.337482] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.337552] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.337686] env[68279]: DEBUG nova.virt.hardware [None 
req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.337865] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.343284] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3ab1ed7-0911-48aa-ad56-d35d2d3cc0ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.361941] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 938.361941] env[68279]: value = "task-2963357" [ 938.361941] env[68279]: _type = "Task" [ 938.361941] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.370842] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.431725] env[68279]: INFO nova.compute.manager [-] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Took 1.24 seconds to deallocate network for instance. 
[ 938.438858] env[68279]: DEBUG nova.compute.manager [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Received event network-vif-plugged-aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.439108] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] Acquiring lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.439384] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] Lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.439557] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] Lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.439758] env[68279]: DEBUG nova.compute.manager [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] No waiting events found dispatching network-vif-plugged-aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 938.439921] env[68279]: WARNING nova.compute.manager [req-f2d2e74e-d3bf-41a7-b148-d1e873be5b7b req-baa1d71f-5e50-4485-8d48-2d45ee16a8af service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Received unexpected event network-vif-plugged-aa608983-7e53-4df9-8ff2-59e7ad8d5f13 for instance with vm_state building and task_state spawning. 
[ 938.592747] env[68279]: DEBUG nova.scheduler.client.report [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.615359] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Successfully updated port: aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.641245] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.703183] env[68279]: INFO nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] bringing vm to original state: 'stopped' [ 938.749330] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.749330] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquired lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.749330] env[68279]: DEBUG nova.network.neutron [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.872897] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963357, 'name': ReconfigVM_Task, 'duration_secs': 0.216687} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.873246] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 938.942687] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.098280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.146s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.100808] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.298s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.101080] env[68279]: DEBUG nova.objects.instance [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lazy-loading 'resources' on Instance uuid d61b2c4f-942a-4e29-8cac-11bc0750605a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.116826] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.116983] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.117169] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.129318] env[68279]: INFO nova.network.neutron [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating port 15317896-8bd1-46c4-8fc9-8bf0966392a4 with 
attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 939.175957] env[68279]: DEBUG nova.compute.manager [req-eed6d53e-256c-44f5-b797-d69a67c109e9 req-c76d859a-10d4-44b8-97d0-69575862a407 service nova] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Received event network-vif-deleted-16b424ba-6749-431c-bdc5-22c910ad0fe6 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.176224] env[68279]: DEBUG nova.compute.manager [req-eed6d53e-256c-44f5-b797-d69a67c109e9 req-c76d859a-10d4-44b8-97d0-69575862a407 service nova] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Received event network-vif-deleted-d740fd9a-b622-4f9e-b912-7e3d855dc7fa {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.380809] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.381192] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.381231] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.381404] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.381625] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.381687] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.381869] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.382036] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.382210] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.382376] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.382547] env[68279]: DEBUG nova.virt.hardware [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.387798] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 939.390724] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b852249-b279-4d79-b432-d56dc747789d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.413859] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 939.413859] env[68279]: value = "task-2963358" [ 939.413859] env[68279]: _type = "Task" [ 939.413859] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.424893] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963358, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.475848] env[68279]: DEBUG nova.network.neutron [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Updating instance_info_cache with network_info: [{"id": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "address": "fa:16:3e:27:48:99", "network": {"id": "4c637790-40fd-487d-996f-c12eda78c7db", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2010423208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a6f5472e1ab4b2ab95a6c8927281b59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5c5af3-27", "ovs_interfaceid": "3b5c5af3-274c-4169-8f68-a3210a3cdf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.652502] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.710600] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.712471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.712471] env[68279]: DEBUG nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.712471] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239c34e3-e99f-408d-8e68-a5cc6dd003db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.725788] env[68279]: DEBUG nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 939.783934] env[68279]: DEBUG nova.network.neutron [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Updating instance_info_cache with network_info: [{"id": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "address": "fa:16:3e:61:29:01", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa608983-7e", "ovs_interfaceid": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.924103] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963358, 'name': ReconfigVM_Task, 'duration_secs': 0.179962} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.926726] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 939.928100] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04662f2f-fbff-47bd-8377-965d82ec744e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.952218] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.955091] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66f1cc42-8c69-4240-98d9-4e5a7116bc6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.974776] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 939.974776] env[68279]: value = "task-2963359" [ 939.974776] env[68279]: _type = "Task" [ 939.974776] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.980456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Releasing lock "refresh_cache-2cdd785d-6758-469f-b1f6-266154853f8c" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.990917] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963359, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.103564] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca32dc1d-d56a-41fd-a387-2d2920cfc10d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.112097] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410c722b-2f9a-42e9-9518-e0b4346ed880 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.144524] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8d5784-f597-48bf-a767-950af7d9453c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.152577] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0f17d7-bbda-440b-b946-d88cc4e5477d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.166117] env[68279]: DEBUG nova.compute.provider_tree [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.230958] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.231339] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-440f76d9-600b-4f8d-ab8e-83cf36db723f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.240716] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 940.240716] env[68279]: value = "task-2963360" [ 940.240716] env[68279]: _type = "Task" [ 940.240716] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.250839] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963360, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.287236] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.287607] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Instance network_info: |[{"id": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "address": "fa:16:3e:61:29:01", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa608983-7e", "ovs_interfaceid": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 940.288074] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:29:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa608983-7e53-4df9-8ff2-59e7ad8d5f13', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.296046] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.296227] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.296477] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b30481d5-4659-46a0-8bf3-800c7d86b2af {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.317515] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.317515] env[68279]: value = "task-2963361" [ 940.317515] env[68279]: _type = "Task" [ 940.317515] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.325934] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963361, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.466370] env[68279]: DEBUG nova.compute.manager [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Received event network-changed-aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.466629] env[68279]: DEBUG nova.compute.manager [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Refreshing instance network info cache due to event network-changed-aa608983-7e53-4df9-8ff2-59e7ad8d5f13. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.466922] env[68279]: DEBUG oslo_concurrency.lockutils [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] Acquiring lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.467175] env[68279]: DEBUG oslo_concurrency.lockutils [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] Acquired lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.467438] env[68279]: DEBUG nova.network.neutron [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Refreshing network info cache for port aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.487062] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963359, 'name': ReconfigVM_Task, 'duration_secs': 0.297045} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.487729] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8/866eb440-4fc9-4708-8a3b-b53f2be3f6c8.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.488016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 940.491255] env[68279]: DEBUG nova.compute.manager [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.492048] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b2e682-21c6-4071-b356-10e8fc280770 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.669431] env[68279]: DEBUG nova.scheduler.client.report [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.721195] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.721393] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.721592] env[68279]: DEBUG nova.network.neutron [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Building network info cache for instance {{(pid=68279) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 940.753675] env[68279]: DEBUG oslo_vmware.api [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963360, 'name': PowerOffVM_Task, 'duration_secs': 0.162567} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.753935] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.754145] env[68279]: DEBUG nova.compute.manager [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.754901] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17856b6-7ed3-417e-b206-0c372fc7bef5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.831539] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963361, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.997279] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa51dad7-1315-42a3-a64f-a5051818882e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.020929] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5156b57d-41d0-47f3-9c3d-a0eca3d14dde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.039538] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 941.173984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.177107] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.807s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.177837] env[68279]: INFO nova.compute.claims [None 
req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.198378] env[68279]: INFO nova.scheduler.client.report [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Deleted allocations for instance d61b2c4f-942a-4e29-8cac-11bc0750605a [ 941.271774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.561s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.329315] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963361, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.463814] env[68279]: DEBUG nova.network.neutron [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.522770] env[68279]: DEBUG nova.network.neutron [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Updated VIF entry in instance network info cache for port aa608983-7e53-4df9-8ff2-59e7ad8d5f13. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.523083] env[68279]: DEBUG nova.network.neutron [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Updating instance_info_cache with network_info: [{"id": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "address": "fa:16:3e:61:29:01", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa608983-7e", "ovs_interfaceid": "aa608983-7e53-4df9-8ff2-59e7ad8d5f13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.524756] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b0dd4c-47d2-401d-8e2a-6df9fcfec350 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.533869] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Doing hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 941.534731] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-84f04c07-fc54-4656-8f73-c037484acb68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.542114] env[68279]: DEBUG oslo_vmware.api [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 941.542114] env[68279]: value = "task-2963362" [ 941.542114] env[68279]: _type = "Task" [ 941.542114] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.556607] env[68279]: DEBUG oslo_vmware.api [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963362, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.610161] env[68279]: DEBUG nova.network.neutron [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Port 58be91e3-be6b-4118-8032-e40c6f5f099d binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 941.705609] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1d0fb602-e98e-4810-ac64-8adf68eba0b4 tempest-ServersAdminNegativeTestJSON-1487409741 tempest-ServersAdminNegativeTestJSON-1487409741-project-member] Lock "d61b2c4f-942a-4e29-8cac-11bc0750605a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.739s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.780147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.832052] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963361, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.966367] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.998249] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f0a88f53406a71ddc1d7603efa5f2b33',container_format='bare',created_at=2025-03-12T08:47:42Z,direct_url=,disk_format='vmdk',id=8b039f01-d278-40cd-a3c9-5f971fe7f486,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1091557764-shelved',owner='34fd2747aeac4bcd9dd18075cf4ebd8b',properties=ImageMetaProps,protected=,size=31671808,status='active',tags=,updated_at=2025-03-12T08:47:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 941.998572] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.998744] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c 
tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 941.998936] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.999124] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 941.999304] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 941.999521] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 941.999680] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 941.999845] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.000025] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.000249] env[68279]: DEBUG nova.virt.hardware [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.001134] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580845df-6093-4e67-86d5-894ea9853c26 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.009498] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4911bb5-404f-4605-ba2c-e67e042865f7 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.025087] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:5f:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15317896-8bd1-46c4-8fc9-8bf0966392a4', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.032766] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.033188] env[68279]: DEBUG oslo_concurrency.lockutils [req-96085fa5-f59a-48be-91fb-b3771304e3df req-afaca8f0-b045-4196-af27-70f3577e5d07 service nova] Releasing lock "refresh_cache-80d881c8-3363-4cf8-bf16-a715d8739335" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.033530] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.033767] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87cb5564-0131-4214-8c93-ac4dd3e8c687 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.057580] env[68279]: DEBUG oslo_vmware.api [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963362, 'name': ResetVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.058946] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.058946] env[68279]: value = "task-2963363" [ 942.058946] env[68279]: _type = "Task" [ 942.058946] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.067105] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963363, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.201074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.201400] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.201605] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.201856] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.202059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.204391] env[68279]: INFO nova.compute.manager [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Terminating instance [ 942.333841] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963361, 'name': CreateVM_Task, 'duration_secs': 1.685139} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.334116] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.334999] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.335240] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.335576] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.338336] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c21ea2-0f90-4741-99d0-5bd7ef6da071 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.344785] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 942.344785] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52003c8a-26e0-a62c-1faa-2477cfc277fe" [ 942.344785] env[68279]: _type = "Task" [ 942.344785] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.360966] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52003c8a-26e0-a62c-1faa-2477cfc277fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.497938] env[68279]: DEBUG nova.compute.manager [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.498173] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.500720] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.500720] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.500720] env[68279]: DEBUG nova.compute.manager [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] No waiting events found dispatching network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 942.500720] env[68279]: WARNING nova.compute.manager [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received unexpected event network-vif-plugged-15317896-8bd1-46c4-8fc9-8bf0966392a4 for instance with vm_state shelved_offloaded and task_state spawning. [ 942.500720] env[68279]: DEBUG nova.compute.manager [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.500720] env[68279]: DEBUG nova.compute.manager [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing instance network info cache due to event network-changed-15317896-8bd1-46c4-8fc9-8bf0966392a4. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 942.500720] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Acquiring lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.500720] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Acquired lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.500720] env[68279]: DEBUG nova.network.neutron [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Refreshing network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.546297] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f7f1f5-9ce8-4012-859e-b84284a332b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.560790] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e686290e-8b43-4556-828a-3f12a15794e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.564592] env[68279]: DEBUG oslo_vmware.api [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963362, 'name': ResetVM_Task, 'duration_secs': 0.51928} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.567411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Did hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 942.567615] env[68279]: DEBUG nova.compute.manager [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.569041] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5805d85a-71f6-4378-a6a7-14ba64fbeeb7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.597952] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cc4536-00b1-4f3d-b3f5-98c8448956bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.606810] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963363, 'name': CreateVM_Task, 'duration_secs': 0.340895} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.607516] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.608188] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.608355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.608724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.608977] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41201cfb-4518-4248-b8d2-9c45a63e7c47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.613779] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe350ee-2faf-4e01-9de9-b08486c2338c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.626888] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 942.626888] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52989fe8-0099-600b-bcc0-a36cdf748195" [ 942.626888] env[68279]: _type = "Task" [ 942.626888] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.646975] env[68279]: DEBUG nova.compute.provider_tree [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.648443] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.648695] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.648882] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.661039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.661039] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Processing image 8b039f01-d278-40cd-a3c9-5f971fe7f486 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.661039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.661039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.661039] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.661039] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5b6b92a-e143-4ecb-89d7-95d964e040e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.670599] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.670784] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.671752] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd4131ce-2a20-44c3-a360-153e9fa9f250 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.681544] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 942.681544] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526d1403-79c9-3991-3194-dbfb981c0d79" [ 942.681544] env[68279]: _type = "Task" [ 942.681544] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.689682] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526d1403-79c9-3991-3194-dbfb981c0d79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.708923] env[68279]: DEBUG nova.compute.manager [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 942.709209] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 942.710103] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680b544b-d490-46b6-b72b-59b079dc169b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.718359] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.718585] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aac0a035-15dd-4747-89a0-d2cddf13c3e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.781192] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.781415] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.781619] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore2] 02f34ac7-9deb-4714-92cb-bb507fde1e74 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.782216] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d9c3186-df08-456c-a3dc-8db3275cda2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.789624] env[68279]: DEBUG oslo_vmware.api [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 942.789624] env[68279]: value = "task-2963365" [ 942.789624] env[68279]: _type = "Task" [ 942.789624] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.797546] env[68279]: DEBUG oslo_vmware.api [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963365, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.856671] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52003c8a-26e0-a62c-1faa-2477cfc277fe, 'name': SearchDatastore_Task, 'duration_secs': 0.015458} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.857142] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.857274] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.857518] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.857701] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.857867] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.858180] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55650954-08c5-49d4-912f-ce89fa6a4aac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.876622] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.876785] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.877562] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d7d00e-3f9b-436f-ad47-cf3842c46c0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.883456] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 942.883456] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520df654-9aad-1596-6c2c-71208f36b9e9" [ 942.883456] env[68279]: _type = "Task" [ 942.883456] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.892168] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520df654-9aad-1596-6c2c-71208f36b9e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.110825] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ee46bd0e-8282-4166-bac6-70c759538408 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.884s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.153356] env[68279]: DEBUG nova.scheduler.client.report [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.197758] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 943.197758] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Fetch image to [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae/OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 943.197888] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 
tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Downloading stream optimized image 8b039f01-d278-40cd-a3c9-5f971fe7f486 to [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae/OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae.vmdk on the data store datastore1 as vApp {{(pid=68279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 943.198060] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Downloading image file data 8b039f01-d278-40cd-a3c9-5f971fe7f486 to the ESX as VM named 'OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae' {{(pid=68279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 943.295684] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 943.295684] env[68279]: value = "resgroup-9" [ 943.295684] env[68279]: _type = "ResourcePool" [ 943.295684] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 943.295982] env[68279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-efee9674-af6c-4476-a561-0b5f3f23ae5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.320905] env[68279]: DEBUG oslo_vmware.api [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963365, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157598} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.322457] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.322675] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.322860] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.323045] env[68279]: INFO nova.compute.manager [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Took 0.61 seconds to destroy the instance on the hypervisor. 
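The completed-task records in this capture all follow one regular shape (Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully.), so per-operation timings such as the DeleteDatastoreFile_Task above can be pulled out of the raw text offline. The following Python sketch is illustrative only: it is not part of Nova or oslo.vmware, and the default file name nova-compute.log is an assumed placeholder for wherever this capture is stored.

import re
import sys
from collections import defaultdict

# Matches completed-task records exactly as they appear in this capture, e.g.
# "Task: {'id': task-2963365, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157598} completed successfully."
TASK_RE = re.compile(
    r"Task: \{'id': (?P<task_id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully"
)

def summarize(path):
    durations = defaultdict(list)  # VMware task name -> list of durations in seconds
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            # A single physical line in this capture can hold several records,
            # so scan each line for every match rather than assuming one per line.
            for match in TASK_RE.finditer(line):
                durations[match.group("name")].append(float(match.group("secs")))
    for name, secs in sorted(durations.items(), key=lambda kv: -max(kv[1])):
        print(f"{name:<28} count={len(secs):<3} max={max(secs):.3f}s avg={sum(secs) / len(secs):.3f}s")

if __name__ == "__main__":
    # The default path is a placeholder; pass the real log file as the first argument.
    summarize(sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log")

Run against this section alone, such a summary would surface, for example, that ResetVM_Task took about 0.52s and that the CopyVirtualDisk_Task and ExtendVirtualDisk_Task steps for instance 80d881c8 took roughly 0.54s and 0.07s, matching the duration_secs fields already visible in the surrounding records.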
[ 943.323293] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 943.323559] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease: (returnval){ [ 943.323559] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 943.323559] env[68279]: _type = "HttpNfcLease" [ 943.323559] env[68279]: } obtained for vApp import into resource pool (val){ [ 943.323559] env[68279]: value = "resgroup-9" [ 943.323559] env[68279]: _type = "ResourcePool" [ 943.323559] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 943.323765] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the lease: (returnval){ [ 943.323765] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 943.323765] env[68279]: _type = "HttpNfcLease" [ 943.323765] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 943.323944] env[68279]: DEBUG nova.compute.manager [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 943.324048] env[68279]: DEBUG nova.network.neutron [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.335195] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.335195] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 943.335195] env[68279]: _type = "HttpNfcLease" [ 943.335195] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 943.371846] env[68279]: DEBUG nova.network.neutron [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updated VIF entry in instance network info cache for port 15317896-8bd1-46c4-8fc9-8bf0966392a4. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.372049] env[68279]: DEBUG nova.network.neutron [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [{"id": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "address": "fa:16:3e:24:5f:56", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15317896-8b", "ovs_interfaceid": "15317896-8bd1-46c4-8fc9-8bf0966392a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.395104] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520df654-9aad-1596-6c2c-71208f36b9e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009398} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.398625] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c51c6683-a0f4-49fa-bed4-6ed4bd808f0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.405549] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 943.405549] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5292e8b0-5de8-98b1-07f9-380dc93db39c" [ 943.405549] env[68279]: _type = "Task" [ 943.405549] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.415784] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5292e8b0-5de8-98b1-07f9-380dc93db39c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.608304] env[68279]: DEBUG nova.compute.manager [req-96ec2c6e-294e-40cb-8ab5-70428244a743 req-c407613d-b505-4daa-8638-4532becea7fc service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Received event network-vif-deleted-81c058ad-3832-478e-b2c5-f65692f52164 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.608578] env[68279]: INFO nova.compute.manager [req-96ec2c6e-294e-40cb-8ab5-70428244a743 req-c407613d-b505-4daa-8638-4532becea7fc service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Neutron deleted interface 81c058ad-3832-478e-b2c5-f65692f52164; detaching it from the instance and deleting it from the info cache [ 943.608674] env[68279]: DEBUG nova.network.neutron [req-96ec2c6e-294e-40cb-8ab5-70428244a743 req-c407613d-b505-4daa-8638-4532becea7fc service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.661696] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.662199] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 943.664659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.556s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.666631] env[68279]: INFO nova.compute.claims [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.700752] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.700979] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.701273] env[68279]: DEBUG nova.network.neutron [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.827073] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "99024851-0add-44b9-a70a-2e242180d6a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.827226] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.839839] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.839839] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 943.839839] env[68279]: _type = "HttpNfcLease" [ 943.839839] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 943.875710] env[68279]: DEBUG oslo_concurrency.lockutils [req-40d04eca-1dab-4ef5-ad57-a2d94d544e2f req-f52825a4-e6ad-4d4a-be03-0624e9da7b85 service nova] Releasing lock "refresh_cache-eccc5882-2c8b-456d-bbd2-d9ed22777a77" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.916874] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5292e8b0-5de8-98b1-07f9-380dc93db39c, 'name': SearchDatastore_Task, 'duration_secs': 0.011995} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.917212] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.917474] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 80d881c8-3363-4cf8-bf16-a715d8739335/80d881c8-3363-4cf8-bf16-a715d8739335.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.917668] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe5f3529-e5ee-4792-8f37-919d3be9a39f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.924934] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 943.924934] env[68279]: value = "task-2963367" [ 943.924934] env[68279]: _type = "Task" [ 943.924934] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.934791] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963367, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.086882] env[68279]: DEBUG nova.network.neutron [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.110915] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce3357d5-44ff-4e26-9d49-dd4903045e60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.122285] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbc3640-2e61-4889-bf31-16e119f9f021 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.162409] env[68279]: DEBUG nova.compute.manager [req-96ec2c6e-294e-40cb-8ab5-70428244a743 req-c407613d-b505-4daa-8638-4532becea7fc service nova] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Detach interface failed, port_id=81c058ad-3832-478e-b2c5-f65692f52164, reason: Instance 02f34ac7-9deb-4714-92cb-bb507fde1e74 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 944.171721] env[68279]: DEBUG nova.compute.utils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 944.175762] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 944.175762] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.230643] env[68279]: DEBUG nova.policy [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4003f0b6cdab4315926820ee20a05fac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d622dfbecc49c0b8445a3453ef88ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.335256] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 944.339970] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.339970] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 944.339970] env[68279]: _type = "HttpNfcLease" [ 944.339970] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 944.364245] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "2cdd785d-6758-469f-b1f6-266154853f8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.364531] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.364866] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.364915] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.365095] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.367599] env[68279]: INFO nova.compute.manager [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Terminating instance [ 944.441873] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963367, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.545404] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Successfully created port: efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.547773] env[68279]: DEBUG nova.network.neutron [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.589409] env[68279]: INFO nova.compute.manager [-] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Took 1.27 seconds to deallocate network for instance. [ 944.676403] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.839284] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.839284] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 944.839284] env[68279]: _type = "HttpNfcLease" [ 944.839284] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 944.859971] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.873113] env[68279]: DEBUG nova.compute.manager [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.873113] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.874063] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acc5697-d401-4e27-99e0-5a14d6d38091 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.882711] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.885803] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60ea15a5-b331-4ae3-a6a4-15673a72e63e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.894033] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 944.894033] env[68279]: value = "task-2963368" [ 944.894033] env[68279]: _type = "Task" [ 944.894033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.902088] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.937734] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963367, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541315} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.940112] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 80d881c8-3363-4cf8-bf16-a715d8739335/80d881c8-3363-4cf8-bf16-a715d8739335.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.940363] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.940773] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fe08a8e-b6be-4b80-ad70-77357e7ddfac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.947384] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 944.947384] env[68279]: value = "task-2963369" [ 944.947384] env[68279]: _type = "Task" [ 944.947384] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.957469] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963369, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.050644] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.066943] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1590c5-d705-4690-bf8c-7db8f01a77bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.075172] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d4b5ac-fae5-446a-9828-c92bcd19cbfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.109369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.111280] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2bce73-912e-4770-ad00-465d6edb4d18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.120287] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd51c84-3031-4863-88fb-82c4ff666e6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.135071] env[68279]: DEBUG nova.compute.provider_tree [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.339618] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.339618] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 945.339618] env[68279]: _type = "HttpNfcLease" [ 945.339618] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 945.404553] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.458592] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066273} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.458833] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.459711] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fd831c-45b7-4c18-a3e9-0f0ddb5d46b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.485161] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 80d881c8-3363-4cf8-bf16-a715d8739335/80d881c8-3363-4cf8-bf16-a715d8739335.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.485161] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fbdd09a-86f0-42f1-931b-75304e9dc13f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.509844] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 945.509844] env[68279]: value = "task-2963370" [ 945.509844] env[68279]: _type = "Task" [ 945.509844] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.520644] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963370, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.574183] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1cc2b4-42c1-477f-8614-73d4c8b24c50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.595409] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e23d53-81d7-48d2-ace6-a27e3f7bb20c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.603859] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.638946] env[68279]: DEBUG nova.scheduler.client.report [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.688173] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 945.717229] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.717523] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.717706] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.717906] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.718071] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.718228] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.718439] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.718597] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
945.718761] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.718921] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.719113] env[68279]: DEBUG nova.virt.hardware [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.720193] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcfb1f9-6476-41e7-818f-194cb1d7c86b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.729624] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026eac73-7e61-4939-95cf-8f461dd5a79a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.841675] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.841675] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 945.841675] env[68279]: _type = "HttpNfcLease" [ 945.841675] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 945.841919] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 945.841919] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52014de0-9c4b-aa9d-1b2d-b71d8aece5ce" [ 945.841919] env[68279]: _type = "HttpNfcLease" [ 945.841919] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 945.842750] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b72f546-673e-4805-b235-b72a78e7e44a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.851511] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk from lease info. 
{{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 945.851730] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HTTP connection to write to file with size = 31671808 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk. {{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 945.923023] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-89a3a0dd-937d-45ac-8309-1335eefeb1f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.923722] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963368, 'name': PowerOffVM_Task, 'duration_secs': 0.759618} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.925110] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.925231] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.925857] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe06e6d-4401-4b4b-be5e-0da2fbcea0c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.981423] env[68279]: DEBUG nova.compute.manager [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Received event network-vif-plugged-efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.981662] env[68279]: DEBUG oslo_concurrency.lockutils [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] Acquiring lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.981871] env[68279]: DEBUG oslo_concurrency.lockutils [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.982051] env[68279]: DEBUG oslo_concurrency.lockutils [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b 
req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.982263] env[68279]: DEBUG nova.compute.manager [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] No waiting events found dispatching network-vif-plugged-efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 945.982388] env[68279]: WARNING nova.compute.manager [req-f0c0a17f-435a-4d27-9cb0-4c9afcbd9a1b req-841c3371-414c-4e5f-9124-8c1bc6df7714 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Received unexpected event network-vif-plugged-efecef48-e3c5-47bc-8da4-6227052ad445 for instance with vm_state building and task_state spawning. [ 945.995650] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.995916] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.996165] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Deleting the datastore file [datastore2] 2cdd785d-6758-469f-b1f6-266154853f8c {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.996466] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebb9b4ad-6cf0-4981-8360-c598d4ee0e1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.004246] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for the task: (returnval){ [ 946.004246] env[68279]: value = "task-2963372" [ 946.004246] env[68279]: _type = "Task" [ 946.004246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.014510] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963372, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.023599] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963370, 'name': ReconfigVM_Task, 'duration_secs': 0.325768} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.023882] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 80d881c8-3363-4cf8-bf16-a715d8739335/80d881c8-3363-4cf8-bf16-a715d8739335.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.027336] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2a89f6d-4038-40c2-bf95-1fa1cf9ab4e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.032038] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 946.032038] env[68279]: value = "task-2963373" [ 946.032038] env[68279]: _type = "Task" [ 946.032038] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.040505] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963373, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.077602] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Successfully updated port: efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 946.110400] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.110767] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81a3725f-c611-4bdc-8386-bbc2da1ce0b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.118857] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 946.118857] env[68279]: value = "task-2963374" [ 946.118857] env[68279]: _type = "Task" [ 946.118857] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.131323] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963374, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.144349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.144993] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 946.147951] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.230s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.149513] env[68279]: INFO nova.compute.claims [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.515814] env[68279]: DEBUG oslo_vmware.api [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Task: {'id': task-2963372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123162} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.516147] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.516385] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.516551] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.516727] env[68279]: INFO nova.compute.manager [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 946.516982] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.517251] env[68279]: DEBUG nova.compute.manager [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.517353] env[68279]: DEBUG nova.network.neutron [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.543044] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963373, 'name': Rename_Task, 'duration_secs': 0.189583} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.543044] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.543234] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cce74ce2-0be2-4b35-a244-fa880aefeaaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.580496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.580651] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.580800] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.587134] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 946.587134] env[68279]: value = "task-2963375" [ 946.587134] env[68279]: _type = "Task" [ 946.587134] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.597380] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963375, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.634719] env[68279]: DEBUG oslo_vmware.api [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963374, 'name': PowerOnVM_Task, 'duration_secs': 0.47593} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.635573] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.635775] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0e53f0-2d39-460b-860c-8e4382be72a9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance '866eb440-4fc9-4708-8a3b-b53f2be3f6c8' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 946.654868] env[68279]: DEBUG nova.compute.utils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.658727] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.658946] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.758302] env[68279]: DEBUG nova.policy [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4003f0b6cdab4315926820ee20a05fac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d622dfbecc49c0b8445a3453ef88ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 947.100441] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963375, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.118582] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.145241] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Successfully created port: 746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.158983] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 947.184908] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 947.185254] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 947.186740] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb659e1-5089-481e-8965-71b588f2c449 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.197658] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 947.197658] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 947.197970] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c14eb1ac-9375-4890-8860-bf14b0f75b22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.279101] env[68279]: DEBUG nova.network.neutron [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updating instance_info_cache with network_info: [{"id": "efecef48-e3c5-47bc-8da4-6227052ad445", "address": "fa:16:3e:30:2c:ea", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefecef48-e3", "ovs_interfaceid": "efecef48-e3c5-47bc-8da4-6227052ad445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.289092] env[68279]: DEBUG nova.network.neutron [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.585915] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bc6388-6993-42ff-afc3-e13a3cfb7fe2 {{(pid=68279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.597700] env[68279]: DEBUG oslo_vmware.api [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963375, 'name': PowerOnVM_Task, 'duration_secs': 0.558726} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.599772] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.599982] env[68279]: INFO nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Took 9.62 seconds to spawn the instance on the hypervisor. [ 947.600184] env[68279]: DEBUG nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.600998] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2e4592-2a7f-4bef-a2da-5907ea9b5127 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.604061] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f9402a-4d7d-4dec-bd8b-6a4d800adb14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.640892] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ed9ff4-bc01-411b-9604-61c02c82261a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.651326] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017dc583-4006-4f75-98f6-a57e4a4c8f1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.674020] env[68279]: DEBUG nova.compute.provider_tree [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.783615] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.783942] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] 
Instance network_info: |[{"id": "efecef48-e3c5-47bc-8da4-6227052ad445", "address": "fa:16:3e:30:2c:ea", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefecef48-e3", "ovs_interfaceid": "efecef48-e3c5-47bc-8da4-6227052ad445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.784385] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:2c:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efecef48-e3c5-47bc-8da4-6227052ad445', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.792037] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Creating folder: Project (53d622dfbecc49c0b8445a3453ef88ae). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.792301] env[68279]: INFO nova.compute.manager [-] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Took 1.27 seconds to deallocate network for instance. [ 947.792532] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c607272b-bd34-4ff9-be95-a4e7ccf23c2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.808028] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Created folder: Project (53d622dfbecc49c0b8445a3453ef88ae) in parent group-v594445. [ 947.808170] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Creating folder: Instances. Parent ref: group-v594665. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.808464] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61d344e5-c266-457a-a218-e19958c19e81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.819734] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Created folder: Instances in parent group-v594665. [ 947.819993] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.820220] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.820437] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3807c316-2ae1-4299-a6a9-e0060f594938 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.842946] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.842946] env[68279]: value = "task-2963378" [ 947.842946] env[68279]: _type = "Task" [ 947.842946] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.852357] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963378, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.892926] env[68279]: DEBUG oslo_vmware.rw_handles [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd75ef-a77c-4770-6f97-5c705b64435a/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 947.893165] env[68279]: INFO nova.virt.vmwareapi.images [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Downloaded image file data 8b039f01-d278-40cd-a3c9-5f971fe7f486 [ 947.894088] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31866444-1820-46a0-8e68-e863c0ac8461 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.916352] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-832431c8-c8ae-424d-876b-108a5b60baa0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.951015] env[68279]: INFO nova.virt.vmwareapi.images [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] The imported VM was unregistered [ 947.953029] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 947.953181] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.953425] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ef6e34f-c0fd-49a6-9687-0ff1d0c56f4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.969540] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.969754] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae/OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae.vmdk to [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk. 
{{(pid=68279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 947.970063] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5f3837b1-7385-4d99-8af0-f79a0e43efd6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.978962] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 947.978962] env[68279]: value = "task-2963380" [ 947.978962] env[68279]: _type = "Task" [ 947.978962] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.987984] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.139569] env[68279]: DEBUG nova.compute.manager [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Received event network-changed-efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.139569] env[68279]: DEBUG nova.compute.manager [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Refreshing instance network info cache due to event network-changed-efecef48-e3c5-47bc-8da4-6227052ad445. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 948.139569] env[68279]: DEBUG oslo_concurrency.lockutils [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] Acquiring lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.139569] env[68279]: DEBUG oslo_concurrency.lockutils [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] Acquired lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.139569] env[68279]: DEBUG nova.network.neutron [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Refreshing network info cache for port efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.148151] env[68279]: INFO nova.compute.manager [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Took 29.25 seconds to build instance. [ 948.173059] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.176287] env[68279]: DEBUG nova.scheduler.client.report [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.203324] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.203580] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.203746] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.203925] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.204092] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.204247] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 948.204457] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.204612] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.204773] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.204950] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.205244] env[68279]: DEBUG nova.virt.hardware [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.206460] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a05a2d-cff2-4ee5-b405-063b986dc858 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.217582] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec2c735-21a0-4ae4-a005-d54346fdb804 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.300986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.355114] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963378, 'name': CreateVM_Task, 'duration_secs': 0.421783} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.355392] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.356206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.356479] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.356853] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.357214] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d395eac6-8fd4-4815-9c1a-d7be2eea446b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.365045] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 948.365045] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a8393-e0f7-8f73-537b-27c6ea7e38fd" [ 948.365045] env[68279]: _type = "Task" [ 948.365045] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.374530] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a8393-e0f7-8f73-537b-27c6ea7e38fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.495085] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.555077] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.555372] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.555956] env[68279]: DEBUG nova.compute.manager [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Going to confirm migration 3 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 948.649733] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a11cbc33-8178-40a3-9c86-5d621b69885c tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.755s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.690062] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.690062] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 948.692499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.168s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.692499] env[68279]: DEBUG nova.objects.instance [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'resources' on Instance uuid 19f693cd-b598-432d-acf5-64da9f640d5e {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.695793] env[68279]: DEBUG nova.compute.manager [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Received event network-vif-plugged-746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.696480] env[68279]: DEBUG oslo_concurrency.lockutils [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] Acquiring lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.696480] env[68279]: DEBUG oslo_concurrency.lockutils [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.696480] env[68279]: DEBUG oslo_concurrency.lockutils [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.696736] env[68279]: DEBUG nova.compute.manager [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] No waiting events found dispatching network-vif-plugged-746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.697802] env[68279]: WARNING nova.compute.manager [req-293bad05-91c1-47ac-98e5-640b48682595 req-f0049e07-532e-48f0-95d0-a0b6805e33cd service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Received unexpected event network-vif-plugged-746b72fd-b408-4ee5-94ec-46790f1160f8 for instance with vm_state building and task_state spawning. [ 948.882017] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529a8393-e0f7-8f73-537b-27c6ea7e38fd, 'name': SearchDatastore_Task, 'duration_secs': 0.021739} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.882017] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Successfully updated port: 746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.882915] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.883241] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.883482] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.883623] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.883799] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.884345] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f629ba5-1bdb-4f2b-83d0-604d17e36c7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.893846] env[68279]: DEBUG nova.network.neutron [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updated VIF entry in instance network info cache for port efecef48-e3c5-47bc-8da4-6227052ad445. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.894021] env[68279]: DEBUG nova.network.neutron [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updating instance_info_cache with network_info: [{"id": "efecef48-e3c5-47bc-8da4-6227052ad445", "address": "fa:16:3e:30:2c:ea", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefecef48-e3", "ovs_interfaceid": "efecef48-e3c5-47bc-8da4-6227052ad445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.897028] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.897216] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.898871] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-301ff651-37aa-4c1a-8f48-66579154e340 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.907947] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 948.907947] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b39a2d-ae06-e8aa-b819-a11ae02651e7" [ 948.907947] env[68279]: _type = "Task" [ 948.907947] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.919842] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b39a2d-ae06-e8aa-b819-a11ae02651e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.996135] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.165203] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.165203] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.165203] env[68279]: DEBUG nova.network.neutron [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.165203] env[68279]: DEBUG nova.objects.instance [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'info_cache' on Instance uuid 866eb440-4fc9-4708-8a3b-b53f2be3f6c8 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.198936] env[68279]: DEBUG nova.compute.utils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 949.204026] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 949.204243] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.247967] env[68279]: DEBUG nova.policy [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4003f0b6cdab4315926820ee20a05fac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d622dfbecc49c0b8445a3453ef88ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.385585] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.385585] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.385730] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.396545] env[68279]: DEBUG oslo_concurrency.lockutils [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] Releasing lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.396807] env[68279]: DEBUG nova.compute.manager [req-82800ad1-f285-40f2-83a4-f7e5703db61a req-7c293d2f-e0cd-4158-a5eb-759e0c8d5a65 service nova] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Received event network-vif-deleted-3b5c5af3-274c-4169-8f68-a3210a3cdf80 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.421749] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b39a2d-ae06-e8aa-b819-a11ae02651e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015446} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.422652] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9ba3cc5-80ae-4a3a-8b83-157a6be50511 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.434366] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 949.434366] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276b481-070e-5c4d-2e82-48e7a26c6c1f" [ 949.434366] env[68279]: _type = "Task" [ 949.434366] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.447129] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276b481-070e-5c4d-2e82-48e7a26c6c1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.494270] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.518984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "80d881c8-3363-4cf8-bf16-a715d8739335" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.518984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.518984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.518984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.519288] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.521292] env[68279]: INFO nova.compute.manager [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Terminating instance [ 949.536163] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Successfully created port: 89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.633777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76787ee-fdcb-402b-9091-9b582a60896a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.644547] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b04543-3933-446d-9899-39bea82da3b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.681250] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb559288-88a9-4afb-bb0b-5fb342149064 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.691024] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9a09d3-b1a7-449c-942a-d1468699981c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.708902] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 949.712357] env[68279]: DEBUG nova.compute.provider_tree [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.929903] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.947350] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276b481-070e-5c4d-2e82-48e7a26c6c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.015603} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.947715] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.947913] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e/a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.950777] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-211e4430-9632-4c55-87df-3b4e1af254c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.961237] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 949.961237] env[68279]: value = "task-2963381" [ 949.961237] env[68279]: _type = "Task" [ 949.961237] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.974291] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.993904] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.027455] env[68279]: DEBUG nova.compute.manager [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.027663] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.028624] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72c19d0-b8d5-4ca9-8497-809a9ed95e50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.039231] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.039512] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcbe6cf2-1575-46b3-9e84-2c7bda84e682 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.053353] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 950.053353] env[68279]: value = "task-2963382" [ 950.053353] env[68279]: _type = "Task" [ 950.053353] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.067225] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963382, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.185603] env[68279]: DEBUG nova.network.neutron [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Updating instance_info_cache with network_info: [{"id": "746b72fd-b408-4ee5-94ec-46790f1160f8", "address": "fa:16:3e:0c:55:a7", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap746b72fd-b4", "ovs_interfaceid": "746b72fd-b408-4ee5-94ec-46790f1160f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.223358] env[68279]: DEBUG nova.scheduler.client.report [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.450819] env[68279]: DEBUG nova.network.neutron [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [{"id": "58be91e3-be6b-4118-8032-e40c6f5f099d", "address": "fa:16:3e:79:d1:f2", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58be91e3-be", "ovs_interfaceid": "58be91e3-be6b-4118-8032-e40c6f5f099d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.473937] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963381, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.492430] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.563621] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963382, 'name': PowerOffVM_Task, 'duration_secs': 0.235003} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.563894] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.564124] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.564470] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-008abdc3-3db3-412b-83d5-d8c0fea9f111 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.631371] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.631599] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.631777] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore2] 
80d881c8-3363-4cf8-bf16-a715d8739335 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.632057] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-485b40d5-45fb-4e38-9424-0db87ecfeee3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.638296] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 950.638296] env[68279]: value = "task-2963384" [ 950.638296] env[68279]: _type = "Task" [ 950.638296] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.646194] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.688940] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.689401] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Instance network_info: |[{"id": "746b72fd-b408-4ee5-94ec-46790f1160f8", "address": "fa:16:3e:0c:55:a7", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap746b72fd-b4", "ovs_interfaceid": "746b72fd-b408-4ee5-94ec-46790f1160f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 950.689864] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:55:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '746b72fd-b408-4ee5-94ec-46790f1160f8', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.698341] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.698567] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.698795] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ac60ab5-c057-44c2-b01f-304b58e2bc02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.715070] env[68279]: DEBUG nova.compute.manager [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Received event network-changed-746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.715262] env[68279]: DEBUG nova.compute.manager [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Refreshing instance network info cache due to event network-changed-746b72fd-b408-4ee5-94ec-46790f1160f8. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 950.715476] env[68279]: DEBUG oslo_concurrency.lockutils [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] Acquiring lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.715619] env[68279]: DEBUG oslo_concurrency.lockutils [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] Acquired lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.715777] env[68279]: DEBUG nova.network.neutron [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Refreshing network info cache for port 746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.723383] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.723383] env[68279]: value = "task-2963385" [ 950.723383] env[68279]: _type = "Task" [ 950.723383] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.732783] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963385, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.738261] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 950.740810] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.742887] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.720s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.744276] env[68279]: INFO nova.compute.claims [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.767913] env[68279]: INFO nova.scheduler.client.report [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted allocations for instance 19f693cd-b598-432d-acf5-64da9f640d5e [ 950.781047] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 950.781283] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.781441] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image 
limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.781621] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.781765] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.781912] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 950.782125] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 950.782287] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 950.782456] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 950.782618] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 950.782788] env[68279]: DEBUG nova.virt.hardware [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 950.783655] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0efc2a-ca75-4f01-8b60-53c3a29fb9a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.794036] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecabb03-84d9-4213-adc2-61e4582bcc17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.955602] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 
tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-866eb440-4fc9-4708-8a3b-b53f2be3f6c8" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.955602] env[68279]: DEBUG nova.objects.instance [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'migration_context' on Instance uuid 866eb440-4fc9-4708-8a3b-b53f2be3f6c8 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.959349] env[68279]: DEBUG nova.compute.manager [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Received event network-vif-plugged-89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.959575] env[68279]: DEBUG oslo_concurrency.lockutils [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] Acquiring lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.959780] env[68279]: DEBUG oslo_concurrency.lockutils [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.959943] env[68279]: DEBUG oslo_concurrency.lockutils [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.960148] env[68279]: DEBUG nova.compute.manager [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] No waiting events found dispatching network-vif-plugged-89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.960325] env[68279]: WARNING nova.compute.manager [req-efefd38c-1f36-4a0f-89d1-d2f73d11045d req-f24baaa2-c074-4a10-8f8b-ae08805d4ac2 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Received unexpected event network-vif-plugged-89c7e747-362e-4106-8694-7df62edf0bbd for instance with vm_state building and task_state spawning. [ 950.972835] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598541} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.973068] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e/a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.973280] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.973520] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f46f2f24-6687-49c4-a3e7-3832c4a3a992 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.981598] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 950.981598] env[68279]: value = "task-2963386" [ 950.981598] env[68279]: _type = "Task" [ 950.981598] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.995820] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.998652] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963380, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.60658} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.998892] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae/OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae.vmdk to [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk. 
[ 950.999096] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Cleaning up location [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 950.999283] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_2acc91e6-b3f8-45ee-822b-d7cdb52383ae {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.999536] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e140be6-7f75-4df0-a3a6-778874b38a6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.007121] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 951.007121] env[68279]: value = "task-2963387" [ 951.007121] env[68279]: _type = "Task" [ 951.007121] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.015600] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.057906] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Successfully updated port: 89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 951.148230] env[68279]: DEBUG oslo_vmware.api [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207559} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.148494] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.148678] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.148855] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.149417] env[68279]: INFO nova.compute.manager [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Took 1.12 seconds to destroy the instance on the hypervisor. [ 951.149417] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.150166] env[68279]: DEBUG nova.compute.manager [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.150166] env[68279]: DEBUG nova.network.neutron [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.234946] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963385, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.278162] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3c37c5a3-1ecc-4b9e-b46f-54d36ab32aee tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "19f693cd-b598-432d-acf5-64da9f640d5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 26.725s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.458393] env[68279]: DEBUG nova.objects.base [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Object Instance<866eb440-4fc9-4708-8a3b-b53f2be3f6c8> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 951.459918] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1450fa-3674-4d2e-a360-c921f5f25c75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.483339] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43c51dd5-3066-4cf0-a7a4-64253d8305b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.492979] env[68279]: DEBUG oslo_vmware.api [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 951.492979] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52744c3e-1373-413e-2e70-e83223150271" [ 951.492979] env[68279]: _type = "Task" [ 951.492979] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.496040] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068029} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.499128] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.499907] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d8874d-abd7-41ec-9825-9cfaecb2aa96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.508671] env[68279]: DEBUG oslo_vmware.api [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52744c3e-1373-413e-2e70-e83223150271, 'name': SearchDatastore_Task, 'duration_secs': 0.008334} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.520392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.528728] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e/a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.530320] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf106d88-ff83-42ab-928c-7390294e1369 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.545645] env[68279]: DEBUG nova.network.neutron [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Updated VIF entry in instance network info cache for port 746b72fd-b408-4ee5-94ec-46790f1160f8. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.545869] env[68279]: DEBUG nova.network.neutron [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Updating instance_info_cache with network_info: [{"id": "746b72fd-b408-4ee5-94ec-46790f1160f8", "address": "fa:16:3e:0c:55:a7", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap746b72fd-b4", "ovs_interfaceid": "746b72fd-b408-4ee5-94ec-46790f1160f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.547576] env[68279]: DEBUG nova.network.neutron [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.554623] env[68279]: DEBUG oslo_vmware.api [None 
req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04538} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.555972] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.556186] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.556455] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk to [datastore1] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.556849] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 951.556849] env[68279]: value = "task-2963388" [ 951.556849] env[68279]: _type = "Task" [ 951.556849] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.558893] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08fd615d-86fc-4ec2-a6f2-4217d47dcbce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.560461] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.560682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.560746] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.572340] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963388, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.574012] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 951.574012] env[68279]: value = "task-2963389" [ 951.574012] env[68279]: _type = "Task" [ 951.574012] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.584701] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.733530] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963385, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.050754] env[68279]: DEBUG oslo_concurrency.lockutils [req-82dd4cb7-c8c7-4bec-8d76-c00ae94ffa78 req-22e1e84c-b2c1-43fc-bf98-b4230eb1752e service nova] Releasing lock "refresh_cache-e0afa3e5-4a40-4257-851c-3cccf57b1724" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.051295] env[68279]: INFO nova.compute.manager [-] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Took 0.90 seconds to deallocate network for instance. 
[ 952.073827] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963388, 'name': ReconfigVM_Task, 'duration_secs': 0.312866} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.075244] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Reconfigured VM instance instance-0000004d to attach disk [datastore2] a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e/a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.075244] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fecfae2-1d19-4c60-9277-4c522614c3a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.089989] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.092824] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.092824] env[68279]: value = "task-2963390" [ 952.092824] env[68279]: _type = "Task" [ 952.092824] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.109614] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963390, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.110543] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 952.157536] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b527fb-2c0b-43cb-97e0-e6561fbdabe1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.174965] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db29a5d1-3142-41c9-accc-4282bc002224 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.213817] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e47aff-f3ed-470f-8d28-243dedaaeed5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.225148] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5928083-c402-4faa-8006-33d7415ec52a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.239041] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963385, 'name': CreateVM_Task, 'duration_secs': 1.012858} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.246733] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.247323] env[68279]: DEBUG nova.compute.provider_tree [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.249396] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.249483] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.249766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 952.250587] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f933144-d30b-4185-a97a-95d16c216bf4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.259109] 
env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.259109] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e65b6-b50d-80c9-4303-aaf13b875a1f" [ 952.259109] env[68279]: _type = "Task" [ 952.259109] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.273768] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e65b6-b50d-80c9-4303-aaf13b875a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.298853] env[68279]: DEBUG nova.network.neutron [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Updating instance_info_cache with network_info: [{"id": "89c7e747-362e-4106-8694-7df62edf0bbd", "address": "fa:16:3e:fd:a6:b0", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c7e747-36", "ovs_interfaceid": "89c7e747-362e-4106-8694-7df62edf0bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.561569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.587682] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.605035] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963390, 'name': Rename_Task, 'duration_secs': 0.208661} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.605420] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.605695] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-243720a5-90a9-4944-8112-9f8acb4bc035 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.615313] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.615313] env[68279]: value = "task-2963391" [ 952.615313] env[68279]: _type = "Task" [ 952.615313] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.626036] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.751467] env[68279]: DEBUG nova.scheduler.client.report [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 952.773448] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520e65b6-b50d-80c9-4303-aaf13b875a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.014965} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.773773] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.774024] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.774260] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.774407] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.774588] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.774865] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ac88093-54a2-4fd0-8584-17a18252e37c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.787386] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.787583] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.788357] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-624c87e8-d32c-4e9a-9e2d-502598a42dfc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.798506] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.798506] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5258efc4-3825-57e6-bde8-7bb032d94f34" [ 952.798506] env[68279]: _type = "Task" [ 952.798506] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.801960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.802286] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Instance network_info: |[{"id": "89c7e747-362e-4106-8694-7df62edf0bbd", "address": "fa:16:3e:fd:a6:b0", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c7e747-36", "ovs_interfaceid": "89c7e747-362e-4106-8694-7df62edf0bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 952.802676] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:a6:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89c7e747-362e-4106-8694-7df62edf0bbd', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 952.810172] env[68279]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.811167] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 952.811398] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb414243-185a-4cf9-bfc1-be499d07c926 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.830576] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5258efc4-3825-57e6-bde8-7bb032d94f34, 'name': SearchDatastore_Task, 'duration_secs': 0.01333} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.831951] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4614656-ed81-4ffd-b894-b9628b050366 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.836933] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 952.836933] env[68279]: value = "task-2963392" [ 952.836933] env[68279]: _type = "Task" [ 952.836933] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.842143] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.842143] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521019f4-6a90-cb1d-e5c6-cc8b78e11d55" [ 952.842143] env[68279]: _type = "Task" [ 952.842143] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.849176] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963392, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.858565] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521019f4-6a90-cb1d-e5c6-cc8b78e11d55, 'name': SearchDatastore_Task, 'duration_secs': 0.014668} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.858873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.859210] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e0afa3e5-4a40-4257-851c-3cccf57b1724/e0afa3e5-4a40-4257-851c-3cccf57b1724.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.859529] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e21b498-6c18-47be-828c-b233cf1ec7a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.869439] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 952.869439] env[68279]: value = "task-2963393" [ 952.869439] env[68279]: _type = "Task" [ 952.869439] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.881686] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963393, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.990694] env[68279]: DEBUG nova.compute.manager [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Received event network-changed-89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.991062] env[68279]: DEBUG nova.compute.manager [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Refreshing instance network info cache due to event network-changed-89c7e747-362e-4106-8694-7df62edf0bbd. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 952.991431] env[68279]: DEBUG oslo_concurrency.lockutils [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] Acquiring lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.991637] env[68279]: DEBUG oslo_concurrency.lockutils [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] Acquired lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.991937] env[68279]: DEBUG nova.network.neutron [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Refreshing network info cache for port 89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.090533] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.134733] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963391, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.257106] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.257686] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.260804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.632s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.261087] env[68279]: DEBUG nova.objects.instance [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lazy-loading 'resources' on Instance uuid 50e08259-7915-49bb-b137-5cc6e9d53c16 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.351431] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963392, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.384040] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963393, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.589678] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.633419] env[68279]: DEBUG oslo_vmware.api [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963391, 'name': PowerOnVM_Task, 'duration_secs': 0.566761} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.633750] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.633956] env[68279]: INFO nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Took 7.95 seconds to spawn the instance on the hypervisor. [ 953.634156] env[68279]: DEBUG nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.635371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f79a9c-98d9-49e5-9fe7-9375b304ddd9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.760959] env[68279]: DEBUG nova.network.neutron [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Updated VIF entry in instance network info cache for port 89c7e747-362e-4106-8694-7df62edf0bbd. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 953.761179] env[68279]: DEBUG nova.network.neutron [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Updating instance_info_cache with network_info: [{"id": "89c7e747-362e-4106-8694-7df62edf0bbd", "address": "fa:16:3e:fd:a6:b0", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c7e747-36", "ovs_interfaceid": "89c7e747-362e-4106-8694-7df62edf0bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.764220] env[68279]: DEBUG nova.compute.utils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 953.768351] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 953.768616] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.812393] env[68279]: DEBUG nova.policy [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 953.858711] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963392, 'name': CreateVM_Task, 'duration_secs': 0.740105} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.862023] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 953.863992] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.864970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.864970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 953.865744] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b86b81a1-d6b2-451d-8a51-119293a9ba53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.877885] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 953.877885] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522672f2-9038-6365-533d-72f6eb8a9b01" [ 953.877885] env[68279]: _type = "Task" [ 953.877885] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.887226] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693526} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.888178] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e0afa3e5-4a40-4257-851c-3cccf57b1724/e0afa3e5-4a40-4257-851c-3cccf57b1724.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.888265] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.891260] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66f694f0-9d55-42d7-9828-f15d27742781 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.897022] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522672f2-9038-6365-533d-72f6eb8a9b01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.903197] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 953.903197] env[68279]: value = "task-2963394" [ 953.903197] env[68279]: _type = "Task" [ 953.903197] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.915574] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963394, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.089985] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963389, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.442154} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.093252] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8b039f01-d278-40cd-a3c9-5f971fe7f486/8b039f01-d278-40cd-a3c9-5f971fe7f486.vmdk to [datastore1] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.094542] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab001b03-b83a-4a76-8a43-a93bd1d58ba2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.133338] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.137279] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5759f5a2-3ac4-423d-b87d-ba8aa89e351a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.162283] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 954.162283] env[68279]: value = "task-2963395" [ 954.162283] env[68279]: _type = "Task" [ 954.162283] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.170555] env[68279]: INFO nova.compute.manager [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Took 27.82 seconds to build instance. [ 954.178482] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963395, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.209546] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82447e0a-87c0-48d3-96da-62b71c11400d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.217747] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127f5fa4-7adb-4c2d-8564-52f92e78c3e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.250404] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e282a65-01c8-42d6-920a-fb1e751df4e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.258411] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70facffe-4473-4b02-9bff-ad4c58c106e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.263900] env[68279]: DEBUG oslo_concurrency.lockutils [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] Releasing lock "refresh_cache-5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.264164] env[68279]: DEBUG nova.compute.manager [req-e9990e3e-0463-4a35-91ce-0f9bd08cb5dd req-fc91b87b-6473-4a6e-a8e3-cfa976bdcc97 service nova] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Received event network-vif-deleted-aa608983-7e53-4df9-8ff2-59e7ad8d5f13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.272151] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 954.278020] env[68279]: DEBUG nova.compute.provider_tree [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.347031] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Successfully created port: f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.394384] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522672f2-9038-6365-533d-72f6eb8a9b01, 'name': SearchDatastore_Task, 'duration_secs': 0.021029} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.394872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.395253] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.398326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.398326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.398326] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.398326] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f94f6973-9c45-4484-8125-974411333cba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.410741] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.411927] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.415202] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29216678-b47a-4201-9ebd-bf88ad545c76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.418654] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072449} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.419028] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.420670] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a220f6d-71ef-46b6-85ca-79994f21c7f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.425964] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 954.425964] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52686042-fbaa-4a60-9ae1-c2baec007ebf" [ 954.425964] env[68279]: _type = "Task" [ 954.425964] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.457970] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] e0afa3e5-4a40-4257-851c-3cccf57b1724/e0afa3e5-4a40-4257-851c-3cccf57b1724.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.458783] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f1ca25d-4f12-468e-8869-89400e768e0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.477529] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52686042-fbaa-4a60-9ae1-c2baec007ebf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.484953] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 954.484953] env[68279]: value = "task-2963396" [ 954.484953] env[68279]: _type = "Task" [ 954.484953] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.495298] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963396, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.674311] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9dacd3ee-ac68-45e6-8d3e-945cbe865f44 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.329s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.674646] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.781949] env[68279]: DEBUG nova.scheduler.client.report [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.938361] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52686042-fbaa-4a60-9ae1-c2baec007ebf, 'name': SearchDatastore_Task, 'duration_secs': 0.040216} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.939236] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8716e126-d1ea-4ac8-ad70-7732bb67f550 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.945393] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 954.945393] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5261882c-1b92-6bfb-8b0e-d002240d3f4a" [ 954.945393] env[68279]: _type = "Task" [ 954.945393] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.954370] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5261882c-1b92-6bfb-8b0e-d002240d3f4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.995594] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.175376] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.285827] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 955.288458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.291469] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.337s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.291469] env[68279]: DEBUG nova.objects.instance [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lazy-loading 'resources' on Instance uuid 65688756-ad94-437f-9a36-bd7e3f7f7a2b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.317702] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 955.317941] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.318110] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.318294] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.318872] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 955.318872] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.318872] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.319034] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.319100] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.319299] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.319510] env[68279]: DEBUG nova.virt.hardware [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.320691] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28666ba5-1d06-46d2-83e1-59a414122a03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.324603] env[68279]: INFO nova.scheduler.client.report [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Deleted allocations for instance 50e08259-7915-49bb-b137-5cc6e9d53c16 [ 955.331746] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0e16bb-f768-4afd-b3c2-395d9b41bc3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.456922] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5261882c-1b92-6bfb-8b0e-d002240d3f4a, 'name': SearchDatastore_Task, 'duration_secs': 0.052906} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.457664] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.457664] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab/5c8d5c97-2b1c-4e43-86c1-9dfcd170faab.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.457881] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b834ee66-9efa-4d1f-b12a-f4df6333dc00 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.465014] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 955.465014] env[68279]: value = "task-2963397" [ 955.465014] env[68279]: _type = "Task" [ 955.465014] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.473090] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.494741] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963396, 'name': ReconfigVM_Task, 'duration_secs': 0.688653} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.495022] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Reconfigured VM instance instance-0000004e to attach disk [datastore2] e0afa3e5-4a40-4257-851c-3cccf57b1724/e0afa3e5-4a40-4257-851c-3cccf57b1724.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.495648] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f27c9ec-8b1e-4022-adad-229a4a4e2cbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.503404] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 955.503404] env[68279]: value = "task-2963398" [ 955.503404] env[68279]: _type = "Task" [ 955.503404] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.512722] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963398, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.677487] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963395, 'name': ReconfigVM_Task, 'duration_secs': 1.310694} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.677487] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfigured VM instance instance-00000017 to attach disk [datastore1] eccc5882-2c8b-456d-bbd2-d9ed22777a77/eccc5882-2c8b-456d-bbd2-d9ed22777a77.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.678229] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'encryption_secret_uuid': None, 'encryption_format': None, 'size': 0, 'encrypted': False, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'device_name': '/dev/sda', 'device_type': 'disk', 'image_id': '01e502b7-2447-4972-9fe7-fd69f76ef71f'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594650', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'name': 'volume-064e058e-cfe0-4945-a75c-2c0d2b58a092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'eccc5882-2c8b-456d-bbd2-d9ed22777a77', 'attached_at': '', 'detached_at': '', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'serial': '064e058e-cfe0-4945-a75c-2c0d2b58a092'}, 'attachment_id': 'c65c02ca-79f4-4e4f-adf9-6567b40ba858', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=68279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 955.678466] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 955.678685] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594650', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'name': 'volume-064e058e-cfe0-4945-a75c-2c0d2b58a092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'eccc5882-2c8b-456d-bbd2-d9ed22777a77', 'attached_at': '', 'detached_at': '', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'serial': '064e058e-cfe0-4945-a75c-2c0d2b58a092'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 955.679909] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb861d05-6116-4d38-9b8a-5b38f7c834d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.701134] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549db8db-85e6-47ee-87f4-754f46be94ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.730722] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] volume-064e058e-cfe0-4945-a75c-2c0d2b58a092/volume-064e058e-cfe0-4945-a75c-2c0d2b58a092.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.731239] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51b2831f-e4ec-4588-9b2a-6b5265b34a57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.753478] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 955.753478] env[68279]: value = "task-2963399" [ 955.753478] env[68279]: _type = "Task" [ 955.753478] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.762981] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963399, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.826056] env[68279]: DEBUG nova.compute.manager [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received event network-vif-plugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.829131] env[68279]: DEBUG oslo_concurrency.lockutils [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.829131] env[68279]: DEBUG oslo_concurrency.lockutils [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.829131] env[68279]: DEBUG oslo_concurrency.lockutils [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.829131] env[68279]: DEBUG nova.compute.manager [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] No waiting events found dispatching network-vif-plugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 955.829131] env[68279]: WARNING nova.compute.manager [req-21cf9094-465f-49b4-889f-6561d1452f95 req-52ad2fbd-87ad-40a1-94bf-9e0c4fb7000c service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received unexpected event network-vif-plugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 for instance with vm_state building and task_state spawning. [ 955.838037] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2ecb38e0-3adb-4d85-9f5d-9a4ac2ab2ca5 tempest-ServersAdminTestJSON-253429690 tempest-ServersAdminTestJSON-253429690-project-member] Lock "50e08259-7915-49bb-b137-5cc6e9d53c16" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.156s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.904960] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Successfully updated port: f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.978509] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963397, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.015156] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963398, 'name': Rename_Task, 'duration_secs': 0.18591} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.017901] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.018439] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d9c52cc-b4e1-4530-846b-4677bc540edf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.029090] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 956.029090] env[68279]: value = "task-2963400" [ 956.029090] env[68279]: _type = "Task" [ 956.029090] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.045887] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.260274] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e5d647-ec67-45d6-9cd4-352462c0399a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.266379] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963399, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.271934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84063179-25ae-4040-9013-95662845a2b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.306194] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e027db-d891-4371-a1d7-708e39f79f2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.313796] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5b484a-626a-414e-8d19-234d1a425ce2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.328922] env[68279]: DEBUG nova.compute.provider_tree [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.409349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.409349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.409349] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.475909] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660608} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.477733] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab/5c8d5c97-2b1c-4e43-86c1-9dfcd170faab.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.477733] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.477733] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7781c957-ac05-4360-bc42-66852925efe5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.484997] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 956.484997] env[68279]: value = "task-2963401" [ 956.484997] env[68279]: _type = "Task" [ 956.484997] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.494129] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.544278] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963400, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.764129] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963399, 'name': ReconfigVM_Task, 'duration_secs': 0.80234} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.764485] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfigured VM instance instance-00000017 to attach disk [datastore2] volume-064e058e-cfe0-4945-a75c-2c0d2b58a092/volume-064e058e-cfe0-4945-a75c-2c0d2b58a092.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.769527] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58ecf83f-1117-4472-8172-2a04130620ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.789016] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 956.789016] env[68279]: value = "task-2963402" [ 956.789016] env[68279]: _type = "Task" [ 956.789016] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.795386] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.832722] env[68279]: DEBUG nova.scheduler.client.report [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.954975] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.999559] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075653} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.999559] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 956.999559] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34be958-52ac-4db3-a84a-5e34fa67b55a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.025055] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab/5c8d5c97-2b1c-4e43-86c1-9dfcd170faab.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.027851] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b422a35-5351-423e-8294-0082d47d0359 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.051838] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963400, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.053659] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 957.053659] env[68279]: value = "task-2963403" [ 957.053659] env[68279]: _type = "Task" [ 957.053659] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.065156] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963403, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.214049] env[68279]: DEBUG nova.network.neutron [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updating instance_info_cache with network_info: [{"id": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "address": "fa:16:3e:74:43:f8", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a76bc3-d8", "ovs_interfaceid": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.297516] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963402, 'name': ReconfigVM_Task, 'duration_secs': 0.313762} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.297838] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594650', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'name': 'volume-064e058e-cfe0-4945-a75c-2c0d2b58a092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'eccc5882-2c8b-456d-bbd2-d9ed22777a77', 'attached_at': '', 'detached_at': '', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'serial': '064e058e-cfe0-4945-a75c-2c0d2b58a092'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 957.298426] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cd411c2-1c71-4cee-bd5c-9f7438afbc69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.306359] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 957.306359] env[68279]: value = "task-2963404" [ 957.306359] env[68279]: _type = "Task" [ 957.306359] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.316019] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963404, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.340205] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.343731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.359s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.343978] env[68279]: DEBUG nova.objects.instance [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lazy-loading 'resources' on Instance uuid 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.369538] env[68279]: INFO nova.scheduler.client.report [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Deleted allocations for instance 65688756-ad94-437f-9a36-bd7e3f7f7a2b [ 957.557651] env[68279]: DEBUG oslo_vmware.api [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963400, 'name': PowerOnVM_Task, 'duration_secs': 1.223715} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.561214] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.561481] env[68279]: INFO nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Took 9.39 seconds to spawn the instance on the hypervisor. 
[ 957.561724] env[68279]: DEBUG nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.562898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cb9356-c2bd-4803-b121-bb3f6791cf88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.572445] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963403, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.717137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.717490] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance network_info: |[{"id": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "address": "fa:16:3e:74:43:f8", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a76bc3-d8", "ovs_interfaceid": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.717924] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:43:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8a76bc3-d8c4-428b-9e5e-3f8b55802c07', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
[ 957.725898] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.725945] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.726516] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64b0d9f9-5180-427c-a9a3-7450c7b4a2f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.747607] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.747607] env[68279]: value = "task-2963405" [ 957.747607] env[68279]: _type = "Task" [ 957.747607] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.756160] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963405, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.820033] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963404, 'name': Rename_Task, 'duration_secs': 0.37926} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.820096] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.820456] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fc15bde-f57d-49b0-ba99-6b730df7db49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.828614] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 957.828614] env[68279]: value = "task-2963406" [ 957.828614] env[68279]: _type = "Task" [ 957.828614] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.843155] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963406, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.879254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-337ec97d-a1f9-40b7-9036-ce380f0cceee tempest-InstanceActionsV221TestJSON-1814189699 tempest-InstanceActionsV221TestJSON-1814189699-project-member] Lock "65688756-ad94-437f-9a36-bd7e3f7f7a2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.695s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.931309] env[68279]: DEBUG nova.compute.manager [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received event network-changed-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.931508] env[68279]: DEBUG nova.compute.manager [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Refreshing instance network info cache due to event network-changed-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 957.931774] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] Acquiring lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.932009] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] Acquired lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.932352] env[68279]: DEBUG nova.network.neutron [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Refreshing network info cache for port f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.071977] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963403, 'name': ReconfigVM_Task, 'duration_secs': 0.547135} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.071977] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab/5c8d5c97-2b1c-4e43-86c1-9dfcd170faab.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.072625] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5724623-d303-4f9d-8712-b7794dc7ebe7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.086011] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 958.086011] env[68279]: value = "task-2963407" [ 958.086011] env[68279]: _type = "Task" [ 958.086011] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.092525] env[68279]: INFO nova.compute.manager [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Took 31.01 seconds to build instance. [ 958.098384] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963407, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.258389] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963405, 'name': CreateVM_Task, 'duration_secs': 0.498443} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.261115] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.262292] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.262292] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.262850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.262850] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ad9d10d-3f8a-406c-b223-99f59f8fe987 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.268089] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 958.268089] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526aef49-73a8-1cb5-b551-0e5b0f761f44" [ 958.268089] env[68279]: _type = "Task" [ 958.268089] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.278344] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526aef49-73a8-1cb5-b551-0e5b0f761f44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.286759] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a900e7f0-fca2-4221-9078-af90d639f7f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.294131] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657fafd4-9c2d-41b4-9857-0b79905d996a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.328772] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0c8704-273b-482c-836e-6bb2e5186159 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.342764] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3e3f5b-fde2-4c33-be07-f1932de27ee5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.346762] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963406, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.360165] env[68279]: DEBUG nova.compute.provider_tree [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.597849] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd4986d9-3968-4072-b426-13995a391ddc tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.520s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.598154] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963407, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.780034] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526aef49-73a8-1cb5-b551-0e5b0f761f44, 'name': SearchDatastore_Task, 'duration_secs': 0.01932} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.780379] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.780618] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.780853] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.780993] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.781185] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.781472] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cec4885f-e70d-4e9a-82b0-c9f25d95db8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.794802] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.795025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.795788] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28d6037-350c-42be-8862-d736f5aca7fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.804139] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 958.804139] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d73cb-22a1-4760-f148-ccc63e5b99fe" [ 958.804139] env[68279]: _type = "Task" [ 958.804139] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.816640] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d73cb-22a1-4760-f148-ccc63e5b99fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.841938] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963406, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.862120] env[68279]: DEBUG nova.scheduler.client.report [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.906984] env[68279]: DEBUG nova.network.neutron [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updated VIF entry in instance network info cache for port f8a76bc3-d8c4-428b-9e5e-3f8b55802c07. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.907435] env[68279]: DEBUG nova.network.neutron [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updating instance_info_cache with network_info: [{"id": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "address": "fa:16:3e:74:43:f8", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a76bc3-d8", "ovs_interfaceid": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.098486] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963407, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.320387] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d73cb-22a1-4760-f148-ccc63e5b99fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01512} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.321197] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a73d54a8-2cf8-437e-8f0e-b5c114cf7fdf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326983] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 959.326983] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52878d03-c82b-7ad4-b13f-64da3078e065" [ 959.326983] env[68279]: _type = "Task" [ 959.326983] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.338327] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52878d03-c82b-7ad4-b13f-64da3078e065, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.343683] env[68279]: DEBUG oslo_vmware.api [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963406, 'name': PowerOnVM_Task, 'duration_secs': 1.058695} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.343988] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.372598] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.374996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.156s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.375444] env[68279]: DEBUG nova.objects.instance [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lazy-loading 'resources' on Instance uuid 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.409060] env[68279]: INFO nova.scheduler.client.report [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleted allocations for instance 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4 [ 959.412352] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a1892ed-5de8-410f-a899-8813afa43cd5 req-7508fcf8-4e45-4ee4-b467-2de6b0f8743d service nova] Releasing lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.483474] env[68279]: DEBUG nova.compute.manager [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 959.485694] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8518bec-fc8f-4287-81f3-91e14e69a889 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.599018] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963407, 'name': Rename_Task, 'duration_secs': 1.186478} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.599785] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.600210] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8049d2a-ce1c-4b2b-967c-2f9854b7328f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.610158] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 959.610158] env[68279]: value = "task-2963408" [ 959.610158] env[68279]: _type = "Task" [ 959.610158] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.619697] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.844979] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52878d03-c82b-7ad4-b13f-64da3078e065, 'name': SearchDatastore_Task, 'duration_secs': 0.017998} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.845284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.845628] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 01404bab-6516-4783-8b9d-0738010b3e9b/01404bab-6516-4783-8b9d-0738010b3e9b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.845892] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e03ce95e-eb2a-4ed1-86cb-6d481005dbc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.855154] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 959.855154] env[68279]: value = "task-2963409" [ 959.855154] env[68279]: _type = "Task" [ 959.855154] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.868913] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963409, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.925545] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bd7b2514-748d-474e-9c16-c4ef6d97189b tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "1ae768c9-3e20-4dee-bdd3-35d7c7d878e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.505s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.007513] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7d50a5f0-a1e7-41b8-9e27-0cc76254c78c tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 48.098s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.123599] env[68279]: DEBUG oslo_vmware.api [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963408, 'name': PowerOnVM_Task, 'duration_secs': 0.474398} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.126919] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.127402] env[68279]: INFO nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Took 9.39 seconds to spawn the instance on the hypervisor. [ 960.127815] env[68279]: DEBUG nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.129160] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a2d43e-43c0-42c7-939c-1dd04197865d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.362710] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fc70bb-4a5a-4895-b1dd-85e6ddcba8ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.368950] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963409, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492251} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.369682] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 01404bab-6516-4783-8b9d-0738010b3e9b/01404bab-6516-4783-8b9d-0738010b3e9b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.369821] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.370080] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce69fd3e-6c87-4d7d-a1e4-2ce01a14d045 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.374954] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575099bb-2b9e-486d-93e0-efbf4756daa0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.379041] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 960.379041] env[68279]: value = "task-2963410" [ 960.379041] env[68279]: _type = "Task" [ 960.379041] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.413605] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e144325-881b-4807-8fbf-265de56795d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.419221] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963410, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.424654] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99971e58-5988-434b-bad7-448acc1dda95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.442621] env[68279]: DEBUG nova.compute.provider_tree [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.656059] env[68279]: INFO nova.compute.manager [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Took 32.76 seconds to build instance. [ 960.900640] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.331644} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.900640] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 960.901430] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56e5009-a278-44b9-9aac-b7398e24b724 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.933635] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 01404bab-6516-4783-8b9d-0738010b3e9b/01404bab-6516-4783-8b9d-0738010b3e9b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.934118] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f12d7958-d427-47f0-8488-515c0cb23d37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.952309] env[68279]: DEBUG nova.scheduler.client.report [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.965028] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 960.965028] env[68279]: value = "task-2963411" [ 960.965028] env[68279]: _type = "Task" [ 960.965028] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.974562] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.158971] env[68279]: DEBUG oslo_concurrency.lockutils [None req-53938721-7e44-4926-9f28-d8cdf136b67c tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.286s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.395619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.395619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.395619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.395619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.395619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.398955] env[68279]: INFO nova.compute.manager [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Terminating instance [ 961.457559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.460233] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.819s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.460434] env[68279]: DEBUG nova.objects.instance [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lazy-loading 'resources' on Instance uuid 192734ca-f549-4461-a05a-5f00f0639977 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.478045] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963411, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.494079] env[68279]: INFO nova.scheduler.client.report [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Deleted allocations for instance 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1 [ 961.906033] env[68279]: DEBUG nova.compute.manager [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 961.906320] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.907299] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d53cac-9a8f-406a-92b5-56aa6653cd85 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.919292] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.921248] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b776942d-eb86-4ef9-8f56-2bbbab021705 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.931143] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 961.931143] env[68279]: value = "task-2963412" [ 961.931143] env[68279]: _type = "Task" [ 961.931143] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.946525] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.978643] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963411, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.003603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2728146-a1c9-4c83-9134-0c7f666cbfce tempest-MigrationsAdminTest-373155627 tempest-MigrationsAdminTest-373155627-project-member] Lock "6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.774s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.362015] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d094d22-83d6-425d-af0d-d4c4a3a63600 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.373755] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee233e9-0622-4526-84d9-c8b3e7156a53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.415627] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4cf9c5-d997-4dad-acb8-04e7246b83c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.425420] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb50bed-b5a2-4ba7-a188-5a08f8b8902a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.443017] env[68279]: DEBUG nova.compute.provider_tree [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.451760] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963412, 'name': PowerOffVM_Task, 'duration_secs': 0.231514} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.452095] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.452275] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.452538] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfa6c732-3db3-40f0-af01-ee89806ea661 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.475827] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963411, 'name': ReconfigVM_Task, 'duration_secs': 1.168925} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.476153] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 01404bab-6516-4783-8b9d-0738010b3e9b/01404bab-6516-4783-8b9d-0738010b3e9b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.477045] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2910a554-8374-4e24-9cfd-5a5b1c184712 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.485316] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 962.485316] env[68279]: value = "task-2963414" [ 962.485316] env[68279]: _type = "Task" [ 962.485316] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.496024] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963414, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.532463] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.532717] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.532855] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleting the datastore file [datastore1] 97c3000f-a3d8-45c1-b0a4-12eb2b22b572 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.533144] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd4b20bf-dbfd-404d-987c-1c240e9cabf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.540515] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for the task: (returnval){ [ 962.540515] env[68279]: value = "task-2963415" [ 962.540515] env[68279]: _type = "Task" [ 962.540515] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.550090] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963415, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.947951] env[68279]: DEBUG nova.scheduler.client.report [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.001130] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963414, 'name': Rename_Task, 'duration_secs': 0.2236} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.001532] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.001861] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aea08619-5920-4bea-bbf2-860b5a4d5f33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.011340] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 963.011340] env[68279]: value = "task-2963416" [ 963.011340] env[68279]: _type = "Task" [ 963.011340] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.025479] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.054712] env[68279]: DEBUG oslo_vmware.api [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Task: {'id': task-2963415, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212266} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.055038] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.055235] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.055422] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.055599] env[68279]: INFO nova.compute.manager [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 963.055846] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.056064] env[68279]: DEBUG nova.compute.manager [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 963.056185] env[68279]: DEBUG nova.network.neutron [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.458083] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.998s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.465513] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.520s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.465513] env[68279]: DEBUG nova.objects.instance [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lazy-loading 'resources' on Instance uuid a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.492261] env[68279]: INFO nova.scheduler.client.report [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Deleted allocations for instance 192734ca-f549-4461-a05a-5f00f0639977 [ 963.518463] env[68279]: DEBUG nova.compute.manager [req-d2f2c4b5-8513-4a9a-8239-1daeb529b7cf req-d8eff8e4-10ed-4dd5-8f36-3da9a41c22a0 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Received event network-vif-deleted-6d3ffb2b-c189-49f3-9753-d336f4c7157f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 963.518686] env[68279]: INFO nova.compute.manager [req-d2f2c4b5-8513-4a9a-8239-1daeb529b7cf req-d8eff8e4-10ed-4dd5-8f36-3da9a41c22a0 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Neutron deleted interface 6d3ffb2b-c189-49f3-9753-d336f4c7157f; detaching it from the instance and deleting it from the info cache [ 963.518865] env[68279]: DEBUG nova.network.neutron [req-d2f2c4b5-8513-4a9a-8239-1daeb529b7cf req-d8eff8e4-10ed-4dd5-8f36-3da9a41c22a0 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.527162] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e 
tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963416, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.966717] env[68279]: DEBUG nova.network.neutron [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.995291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.995408] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.995628] env[68279]: DEBUG nova.compute.manager [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 963.996715] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6322abd-1ccf-4cf1-ae86-1dd617db4d33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.003025] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1e43d561-69ed-4824-b58d-88656d48da9b tempest-ImagesOneServerNegativeTestJSON-1063629953 tempest-ImagesOneServerNegativeTestJSON-1063629953-project-member] Lock "192734ca-f549-4461-a05a-5f00f0639977" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.872s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.010775] env[68279]: DEBUG nova.compute.manager [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 964.011677] env[68279]: DEBUG nova.objects.instance [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lazy-loading 'flavor' on Instance uuid a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.028806] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca8131a4-603b-4d9c-9e58-0791d9e9131b {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.046858] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963416, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.055031] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afa863f-1c82-4785-8f36-4d57f30ee621 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.109434] env[68279]: DEBUG nova.compute.manager [req-d2f2c4b5-8513-4a9a-8239-1daeb529b7cf req-d8eff8e4-10ed-4dd5-8f36-3da9a41c22a0 service nova] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Detach interface failed, port_id=6d3ffb2b-c189-49f3-9753-d336f4c7157f, reason: Instance 97c3000f-a3d8-45c1-b0a4-12eb2b22b572 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 964.432166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31bec4e-b001-4261-80f0-1eff2a11bd36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.446347] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01c0d70-690c-43ef-af6a-97dc7ba9392a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.487788] env[68279]: INFO nova.compute.manager [-] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Took 1.43 seconds to deallocate network for instance. [ 964.497548] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8236484-a3df-4417-b7e0-c29c086be597 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.505834] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73ee290-ecdd-4c4d-ac31-490d0b19a7d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.529681] env[68279]: DEBUG nova.compute.provider_tree [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.548273] env[68279]: DEBUG oslo_vmware.api [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963416, 'name': PowerOnVM_Task, 'duration_secs': 1.085678} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.548273] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.548273] env[68279]: INFO nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Took 9.26 seconds to spawn the instance on the hypervisor. [ 964.548273] env[68279]: DEBUG nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.549500] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571fa67d-af98-4c68-b96f-6221a6734d93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.998411] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.039915] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.041168] env[68279]: DEBUG nova.scheduler.client.report [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.046488] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c06962fe-ae4e-4b8e-9aee-06265904b0e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.057337] env[68279]: DEBUG oslo_vmware.api [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 965.057337] env[68279]: value = "task-2963417" [ 965.057337] env[68279]: _type = "Task" [ 965.057337] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.070962] env[68279]: DEBUG oslo_vmware.api [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.074219] env[68279]: INFO nova.compute.manager [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Took 35.07 seconds to build instance. [ 965.552476] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.553572] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.773s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.553760] env[68279]: DEBUG nova.objects.instance [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 965.573724] env[68279]: DEBUG oslo_vmware.api [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963417, 'name': PowerOffVM_Task, 'duration_secs': 0.379762} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.573855] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.573994] env[68279]: DEBUG nova.compute.manager [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 965.574784] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b332ac44-0981-4c9a-a726-70db36fb5b5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.578840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-57db9dd3-a84f-490a-b237-566220d55c1e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.584s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.580403] env[68279]: INFO nova.scheduler.client.report [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted allocations for instance a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87 [ 966.094905] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dd1c587b-913d-48bf-9a46-2e05a443904d tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.099s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.098825] env[68279]: DEBUG oslo_concurrency.lockutils [None req-812beedc-1c2f-4c71-8ffd-7b3f44209b8e tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.569s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.570969] env[68279]: DEBUG oslo_concurrency.lockutils [None req-abbcf8c2-627a-4caf-9c06-6d9496556388 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.572066] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.712s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.575268] env[68279]: INFO nova.compute.claims [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.625697] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.625766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.673509] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "e6f39528-384c-456b-8155-a6856bab3ce0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.673509] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.730183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.731092] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.129806] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 967.141627] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.141627] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.141627] env[68279]: INFO nova.compute.manager [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Shelving [ 967.176420] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 967.236028] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 967.596259] env[68279]: DEBUG nova.objects.instance [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lazy-loading 'flavor' on Instance uuid a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.669118] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.700571] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.761238] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.833091] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "67466e30-5944-490c-a89b-2d32c59525be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.833347] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.833548] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "67466e30-5944-490c-a89b-2d32c59525be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.833732] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.833899] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.836455] env[68279]: INFO nova.compute.manager [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Terminating instance [ 968.107437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.107437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquired lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.107659] env[68279]: DEBUG nova.network.neutron [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.111026] env[68279]: DEBUG nova.objects.instance [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lazy-loading 'info_cache' on Instance uuid a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.157097] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.157334] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73d2824b-4bdc-42a3-9bb7-29413c6d0702 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.166878] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 968.166878] env[68279]: value = "task-2963418" [ 968.166878] env[68279]: _type = "Task" [ 968.166878] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.184364] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14406ae0-56e4-41b4-9074-c9c1b7b677f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.195405] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.201943] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850756e8-fbc2-48dc-857e-70dfe6b19175 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.243172] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8beced-abc8-4aef-8f11-f479c9584d4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.252774] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29745587-0fac-4a6b-9e5e-a32889901ae7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.272809] env[68279]: DEBUG nova.compute.provider_tree [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.341361] env[68279]: DEBUG nova.compute.manager [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.342933] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.344146] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e5e1fd-d462-4387-bc0c-e8926d544790 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.357248] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.360915] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f01ec6d3-0240-4e7c-9986-ed57b8e23e8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.367266] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 968.367266] env[68279]: value = "task-2963419" [ 968.367266] env[68279]: _type = "Task" [ 968.367266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.377614] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963419, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.613722] env[68279]: DEBUG nova.objects.base [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 968.650455] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "1bd92b53-46c0-4b63-be20-857cffed87cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.650455] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.682560] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963418, 'name': PowerOffVM_Task, 'duration_secs': 0.235112} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.682821] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.684051] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3cb39e-357e-419f-a0fd-e99933656e5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.709711] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d9acee-c374-4b17-a865-3a07a61afaff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.767917] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4dd80f75-13d0-43d7-8042-b175dff50250" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.768187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.778020] env[68279]: DEBUG nova.scheduler.client.report [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.817018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4090e245-b026-4d3a-b7f0-e61543701d8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.817018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.877911] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963419, 'name': PowerOffVM_Task, 'duration_secs': 0.265388} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.878498] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.878943] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.879084] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7c13578-1d7b-4c7e-8934-af239c1202ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.944882] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.945136] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.945319] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore2] 67466e30-5944-490c-a89b-2d32c59525be {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.945593] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97bde312-3522-43d7-acf6-9dfb965ddc3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.953586] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 968.953586] env[68279]: value = "task-2963421" [ 968.953586] env[68279]: _type = "Task" [ 968.953586] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.962830] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.149597] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.223365] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 969.224179] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-440eea2a-f8ea-409d-b0f0-7fbec8d85029 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.232351] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 969.232351] env[68279]: value = "task-2963422" [ 969.232351] env[68279]: _type = "Task" [ 969.232351] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.245607] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963422, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.272140] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.282730] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.283309] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.289703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.179s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.289703] env[68279]: DEBUG nova.objects.instance [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'resources' on Instance uuid 02f34ac7-9deb-4714-92cb-bb507fde1e74 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.320025] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.451197] env[68279]: DEBUG nova.network.neutron [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updating instance_info_cache with network_info: [{"id": "efecef48-e3c5-47bc-8da4-6227052ad445", "address": "fa:16:3e:30:2c:ea", "network": {"id": "9ba20742-e6b8-4dac-b309-b03328be6aae", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1793933921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d622dfbecc49c0b8445a3453ef88ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefecef48-e3", "ovs_interfaceid": "efecef48-e3c5-47bc-8da4-6227052ad445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.466666] env[68279]: DEBUG oslo_vmware.api [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348899} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.467270] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.467345] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.467475] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.468037] env[68279]: INFO nova.compute.manager [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Took 1.13 seconds to destroy the instance on the hypervisor. [ 969.468037] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.468451] env[68279]: DEBUG nova.compute.manager [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.468451] env[68279]: DEBUG nova.network.neutron [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.674854] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.743084] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963422, 'name': CreateSnapshot_Task, 'duration_secs': 0.444743} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.743607] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 969.744130] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ea52b7-3c7a-4cef-b7b6-3072c6bd7027 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.792285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.799100] env[68279]: DEBUG nova.compute.utils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 969.803942] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Not allocating networking since 'none' was specified. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 969.845328] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.865359] env[68279]: DEBUG nova.compute.manager [req-e84f4ca5-dc2f-4c76-9f98-829f98e721c4 req-fc3f91fd-7c7c-412f-89a7-16ab30844f90 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Received event network-vif-deleted-c7a730dc-6b0a-4b9e-96db-c7f8c611ae16 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.865773] env[68279]: INFO nova.compute.manager [req-e84f4ca5-dc2f-4c76-9f98-829f98e721c4 req-fc3f91fd-7c7c-412f-89a7-16ab30844f90 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Neutron deleted interface c7a730dc-6b0a-4b9e-96db-c7f8c611ae16; detaching it from the instance and deleting it from the info cache [ 969.866037] env[68279]: DEBUG nova.network.neutron [req-e84f4ca5-dc2f-4c76-9f98-829f98e721c4 req-fc3f91fd-7c7c-412f-89a7-16ab30844f90 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.954285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Releasing lock "refresh_cache-a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" 
{{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.209462] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861a1eb2-f92c-4056-b4e6-066e770de8a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.216473] env[68279]: DEBUG nova.network.neutron [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.218555] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d04bc7-cc69-4615-b6f8-b8affb67b931 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.253408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25746cf8-63b0-4d79-a3ca-576fd2f8f7b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.263166] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 970.263792] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b74165ae-f42a-45c8-9e37-f87d13759237 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.272129] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a025ad-e087-4b09-9702-9f0604f37e98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.277405] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 970.277405] env[68279]: value = "task-2963423" [ 970.277405] env[68279]: _type = "Task" [ 970.277405] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.290091] env[68279]: DEBUG nova.compute.provider_tree [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.298814] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963423, 'name': CloneVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.304436] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.373425] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6f19121-b1e1-4ae6-ae7a-2642a39d7f1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.382455] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302652c3-528a-41ab-bed0-80be7a57fb7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.424391] env[68279]: DEBUG nova.compute.manager [req-e84f4ca5-dc2f-4c76-9f98-829f98e721c4 req-fc3f91fd-7c7c-412f-89a7-16ab30844f90 service nova] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Detach interface failed, port_id=c7a730dc-6b0a-4b9e-96db-c7f8c611ae16, reason: Instance 67466e30-5944-490c-a89b-2d32c59525be could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 970.575918] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.576274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.723584] env[68279]: INFO nova.compute.manager [-] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Took 1.25 seconds to deallocate network for instance. [ 970.788761] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963423, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.793342] env[68279]: DEBUG nova.scheduler.client.report [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.961901] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.962179] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89021d9a-1d96-49d5-905c-1901a766508c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.969483] env[68279]: DEBUG oslo_vmware.api [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 970.969483] env[68279]: value = "task-2963424" [ 970.969483] env[68279]: _type = "Task" [ 970.969483] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.976984] env[68279]: DEBUG oslo_vmware.api [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963424, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.081269] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 971.095369] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.096043] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.239491] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.288307] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963423, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.298890] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.301181] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.301403] env[68279]: DEBUG nova.objects.instance [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lazy-loading 'resources' on Instance uuid 2cdd785d-6758-469f-b1f6-266154853f8c {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.314719] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.321911] env[68279]: INFO nova.scheduler.client.report [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocations for instance 02f34ac7-9deb-4714-92cb-bb507fde1e74 [ 971.339948] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.340166] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.340373] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.340601] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.340753] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.341184] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.341184] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.341307] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 
tempest-ServerShowV254Test-2100006249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.341698] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.341698] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.341803] env[68279]: DEBUG nova.virt.hardware [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.344198] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a14091-5d43-408f-9efb-de9b780505d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.351935] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca825cf-5e20-4db8-aa8f-c96615df945d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.370685] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.380895] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Creating folder: Project (a22d48aad2d049609d9d3b8f8f242b53). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 971.381967] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-407e0fe0-091d-4751-bff4-726bcf52444f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.394030] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Created folder: Project (a22d48aad2d049609d9d3b8f8f242b53) in parent group-v594445. [ 971.394229] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Creating folder: Instances. Parent ref: group-v594673. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 971.394471] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d16530d-169f-4da9-85b6-62d59d964874 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.403788] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Created folder: Instances in parent group-v594673. [ 971.404049] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 971.404252] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.404458] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0c7e314-87af-4297-a251-28b471115801 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.421919] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.421919] env[68279]: value = "task-2963427" [ 971.421919] env[68279]: _type = "Task" [ 971.421919] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.433837] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963427, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.436246] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.436503] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.436682] env[68279]: INFO nova.compute.manager [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Shelving [ 971.481491] env[68279]: DEBUG oslo_vmware.api [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963424, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.598389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.604643] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.604869] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.604986] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.605151] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.605298] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.605445] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.605579] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 971.605723] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.788702] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963423, 'name': CloneVM_Task, 'duration_secs': 1.325218} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.789085] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Created linked-clone VM from snapshot [ 971.789754] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bc5476-1439-4ea6-a28d-8a24b8c23ae0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.796493] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Uploading image b3d5811d-7a7e-4a27-a53f-44bd9304ade5 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 971.817098] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 971.817098] env[68279]: value = "vm-594672" [ 971.817098] env[68279]: _type = "VirtualMachine" [ 971.817098] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 971.817395] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5c6a5508-c13a-43dc-b1e6-0933dc461818 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.823728] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lease: (returnval){ [ 971.823728] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fc04f2-48bd-b3a6-96e7-7901ab603b97" [ 971.823728] env[68279]: _type = "HttpNfcLease" [ 971.823728] env[68279]: } obtained for exporting VM: (result){ [ 971.823728] env[68279]: value = "vm-594672" [ 971.823728] env[68279]: _type = "VirtualMachine" [ 971.823728] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 971.824027] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the lease: (returnval){ [ 971.824027] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fc04f2-48bd-b3a6-96e7-7901ab603b97" [ 971.824027] env[68279]: _type = "HttpNfcLease" [ 971.824027] env[68279]: } to be ready. 
{{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 971.831696] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bebe6959-bc39-48df-9502-975ba6538889 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "02f34ac7-9deb-4714-92cb-bb507fde1e74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.630s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.833952] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.833952] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fc04f2-48bd-b3a6-96e7-7901ab603b97" [ 971.833952] env[68279]: _type = "HttpNfcLease" [ 971.833952] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 971.933041] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963427, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.981304] env[68279]: DEBUG oslo_vmware.api [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963424, 'name': PowerOnVM_Task, 'duration_secs': 0.564257} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.981580] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.981778] env[68279]: DEBUG nova.compute.manager [None req-4b271de7-2356-4ee0-b8ad-7ac29bc67772 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.984769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b11745-94eb-4142-93a5-de60bc8fd5f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.108963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.152771] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a687bdd9-da1c-4d71-bac6-1295bf16671b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.161973] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2859c107-bdc7-4cb5-a8ab-a1d2d494f967 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.193804] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-89972422-8596-4d6c-b31b-86f7ff5d1ed4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.201890] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10919da8-9a9e-40f1-bff4-6f9c073b4e97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.216032] env[68279]: DEBUG nova.compute.provider_tree [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.334998] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 972.334998] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fc04f2-48bd-b3a6-96e7-7901ab603b97" [ 972.334998] env[68279]: _type = "HttpNfcLease" [ 972.334998] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 972.335323] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 972.335323] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52fc04f2-48bd-b3a6-96e7-7901ab603b97" [ 972.335323] env[68279]: _type = "HttpNfcLease" [ 972.335323] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 972.336386] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811de193-4158-4a40-b560-7fcc25a778d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.344771] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 972.344926] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 972.435147] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963427, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.446469] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-43b02016-5bbc-47d5-be4d-64100ddf216e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.452347] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.453765] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e9a98b9-fe91-4529-8428-221fc1bf53aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.464598] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 972.464598] env[68279]: value = "task-2963429" [ 972.464598] env[68279]: _type = "Task" [ 972.464598] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.476660] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.719301] env[68279]: DEBUG nova.scheduler.client.report [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.725177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.725456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.939286] env[68279]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2963427, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.975604] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963429, 'name': PowerOffVM_Task, 'duration_secs': 0.292805} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.976411] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.977130] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eab12ba-3b0c-4e2c-8406-fef22650ef69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.003743] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d04634-b121-496d-b602-4a660e28d6fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.228603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.234611] env[68279]: DEBUG nova.compute.utils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 973.237737] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 21.716s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.280228] env[68279]: INFO nova.scheduler.client.report [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Deleted allocations for instance 2cdd785d-6758-469f-b1f6-266154853f8c [ 973.437382] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963427, 'name': CreateVM_Task, 'duration_secs': 1.582927} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.437721] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 973.438381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.438855] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.439331] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 973.439663] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca826c7f-88df-4b21-8361-61ab2cb5cf14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.448678] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 973.448678] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52925a03-b435-1c2b-19fd-f4d6d0e03f35" [ 973.448678] env[68279]: _type = "Task" [ 973.448678] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.462291] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52925a03-b435-1c2b-19fd-f4d6d0e03f35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.517377] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 973.517751] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6cd4b678-745d-4054-8332-557924430a72 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.530051] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 973.530051] env[68279]: value = "task-2963430" [ 973.530051] env[68279]: _type = "Task" [ 973.530051] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.543463] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963430, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.744769] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.019s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.790636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-730b42ca-c2a8-4383-b5e7-e2264c37cd17 tempest-InstanceActionsTestJSON-1297792693 tempest-InstanceActionsTestJSON-1297792693-project-member] Lock "2cdd785d-6758-469f-b1f6-266154853f8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.426s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.964584] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52925a03-b435-1c2b-19fd-f4d6d0e03f35, 'name': SearchDatastore_Task, 'duration_secs': 0.023752} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.967599] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.967845] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.968092] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.968234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.968412] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.970243] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8520ff6d-4e20-496a-932a-c806ddf7f816 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.980882] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.980882] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.981605] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bffce62-7ad9-4d7e-bff9-edf76f08f0a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.992094] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 973.992094] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f9667-6d10-54a0-6346-9368cdff7bc6" [ 973.992094] env[68279]: _type = "Task" [ 973.992094] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.003479] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f9667-6d10-54a0-6346-9368cdff7bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.045872] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963430, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.186301] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dd36a7-8261-4b18-8c1e-4875460b962d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.195496] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ee7983-04d7-4325-b2bf-49e48af49c07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.230020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdf86dc-a081-4c95-a41e-05baa06ee164 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.239179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16aa9879-b369-4ff1-b3b8-be36bd43ed93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.255165] env[68279]: DEBUG nova.compute.provider_tree [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.504137] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f9667-6d10-54a0-6346-9368cdff7bc6, 'name': SearchDatastore_Task, 'duration_secs': 0.019079} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.505024] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7659f07-c38e-4fa8-a68c-03695a8e5446 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.511162] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 974.511162] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f742d-5697-52ed-39d0-be18577bea72" [ 974.511162] env[68279]: _type = "Task" [ 974.511162] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.520255] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f742d-5697-52ed-39d0-be18577bea72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.542348] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963430, 'name': CreateSnapshot_Task, 'duration_secs': 0.783204} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.542673] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 974.543489] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280a7069-8a3c-447a-8c48-96cfd15bb29c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.758880] env[68279]: DEBUG nova.scheduler.client.report [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.809840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.810153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.810390] env[68279]: INFO nova.compute.manager [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Attaching volume c6b7b8a8-0a05-44c0-8d92-a721c800ec19 to /dev/sdb [ 974.850604] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90abef58-572e-4bb7-9cee-20ffd072ad6c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.859887] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b75642-a003-4e6b-803f-832d118f25d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.876471] env[68279]: DEBUG nova.virt.block_device [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating existing volume attachment record: 1a67a3c9-f577-4857-aa1b-7e35e1b7b5d5 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 975.025461] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523f742d-5697-52ed-39d0-be18577bea72, 'name': SearchDatastore_Task, 'duration_secs': 0.016804} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.025954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.025954] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.026693] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8a5060f-f667-4b0e-9e4b-51e2622e6a7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.034293] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 975.034293] env[68279]: value = "task-2963432" [ 975.034293] env[68279]: _type = "Task" [ 975.034293] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.044568] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.066115] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 975.066294] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-90e7fd7e-5dd6-40e4-bd38-fd9e340497ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.078325] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 975.078325] env[68279]: value = "task-2963433" [ 975.078325] env[68279]: _type = "Task" [ 975.078325] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.091717] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.247740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.248113] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.549798] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963432, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.595381] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.751429] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 975.775685] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.534s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.779632] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.216s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.779632] env[68279]: DEBUG nova.objects.instance [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid 80d881c8-3363-4cf8-bf16-a715d8739335 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.961338] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.961755] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.962056] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.962291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.962749] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.965355] env[68279]: INFO nova.compute.manager [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Terminating instance [ 976.047363] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687303} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.047654] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.047856] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.048115] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cc1093d-e4b2-48fa-bfdc-a4442ba754b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.057612] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 976.057612] env[68279]: value = "task-2963436" [ 976.057612] env[68279]: _type = "Task" [ 976.057612] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.069758] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.091645] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.277236] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.341183] env[68279]: INFO nova.scheduler.client.report [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocation for migration c434e3c1-7ad3-4c78-887a-0bc29e467e06 [ 976.471528] env[68279]: DEBUG nova.compute.manager [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 976.471782] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.472846] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebee4303-562c-4285-a9a2-e3afb6fecd60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.481791] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.482069] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-295bbbe9-1285-4e12-99a5-40773cf5cdef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.492676] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 976.492676] env[68279]: value = "task-2963437" [ 976.492676] env[68279]: _type = "Task" [ 976.492676] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.506905] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963437, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.571766] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121428} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.572058] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.572929] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79259763-2acf-4c30-9d90-78cf3167e767 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.607030] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.610843] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ad3029d-b9c6-4bc9-9272-940a1a6bc8ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.628599] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.636373] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 976.636373] env[68279]: value = "task-2963438" [ 976.636373] env[68279]: _type = "Task" [ 976.636373] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.650328] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963438, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.721701] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b296197f-bd38-45a9-a49b-a4bd3bd46dab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.730170] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d21435-e1c9-4f2e-beda-449f714f74cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.765409] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2465d83a-820e-4dc2-b12b-a57e314880e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.774468] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb81316b-83b3-45ad-8624-b220a81aa8cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.790421] env[68279]: DEBUG nova.compute.provider_tree [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.851597] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6df3dfaa-d5ad-41a2-aad6-766174883b19 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 28.296s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.003856] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963437, 'name': PowerOffVM_Task, 'duration_secs': 0.199425} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.004159] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.004332] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.004593] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93cfc5aa-74da-413b-ab53-8d286a0bdce8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.091098] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.150728] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963438, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.297386] env[68279]: DEBUG nova.scheduler.client.report [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.592830] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.650767] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963438, 'name': ReconfigVM_Task, 'duration_secs': 0.709508} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.651079] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.651740] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ddba0e7-9d79-494f-806f-5f9fc550f163 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.661447] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 977.661447] env[68279]: value = "task-2963441" [ 977.661447] env[68279]: _type = "Task" [ 977.661447] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.671253] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963441, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.802811] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.806849] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.808s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.807182] env[68279]: DEBUG nova.objects.instance [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lazy-loading 'resources' on Instance uuid 97c3000f-a3d8-45c1-b0a4-12eb2b22b572 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.824315] env[68279]: INFO nova.scheduler.client.report [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance 80d881c8-3363-4cf8-bf16-a715d8739335 [ 978.022020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "e1b12b1c-5755-41eb-b550-88c573a09877" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.022292] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.092571] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task} progress is 95%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.174863] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963441, 'name': Rename_Task, 'duration_secs': 0.190725} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.174863] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.174863] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-492b53af-9a6c-43b4-a3cb-bb85b0d54ac8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.181989] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 978.181989] env[68279]: value = "task-2963442" [ 978.181989] env[68279]: _type = "Task" [ 978.181989] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.191275] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.333937] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c9aea2da-efbe-4f08-a39a-5dc9911cf4cf tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "80d881c8-3363-4cf8-bf16-a715d8739335" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.815s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.437329] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.438534] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.438808] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleting the datastore file [datastore2] 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.442601] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45855ea3-a673-4ff1-a7c5-52a5cbc6e464 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.453211] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 978.453211] env[68279]: value = "task-2963443" [ 978.453211] env[68279]: _type = "Task" [ 978.453211] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.467418] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963443, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.524489] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 978.595097] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963433, 'name': CloneVM_Task, 'duration_secs': 3.479448} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.598533] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Created linked-clone VM from snapshot [ 978.599645] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646110a9-f0bd-48dd-bf0f-c8693cf03fd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.609239] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Uploading image c23360f2-77b9-4b89-9c6d-1aafb5e74c2f {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 978.637497] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 978.637497] env[68279]: value = "vm-594677" [ 978.637497] env[68279]: _type = "VirtualMachine" [ 978.637497] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 978.637798] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a6b5aba7-dc79-4d1c-b58d-bd6a99adf283 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.648467] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lease: (returnval){ [ 978.648467] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5260cf21-17fa-925c-7faf-c6f861244a87" [ 978.648467] env[68279]: _type = "HttpNfcLease" [ 978.648467] env[68279]: } obtained for exporting VM: (result){ [ 978.648467] env[68279]: value = "vm-594677" [ 978.648467] env[68279]: _type = "VirtualMachine" [ 978.648467] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 978.648802] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the lease: (returnval){ [ 978.648802] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5260cf21-17fa-925c-7faf-c6f861244a87" [ 978.648802] env[68279]: _type = "HttpNfcLease" [ 978.648802] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 978.659515] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.659515] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5260cf21-17fa-925c-7faf-c6f861244a87" [ 978.659515] env[68279]: _type = "HttpNfcLease" [ 978.659515] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.692903] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963442, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.749566] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6509a96f-aebf-408c-8ace-16a2846ce87f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.758891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdb4aca-924d-47a2-8cf1-2b42a3dd7458 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.765021] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.765296] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.765537] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.765721] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.765886] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.795822] env[68279]: INFO nova.compute.manager [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 
866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Terminating instance [ 978.799121] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ee8e96-7552-4647-aefc-e57f549805b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.808894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e9805f-062d-4e09-b29b-2cbce637ce2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.824672] env[68279]: DEBUG nova.compute.provider_tree [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.965437] env[68279]: DEBUG oslo_vmware.api [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252807} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.965538] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.965886] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.965886] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.966279] env[68279]: INFO nova.compute.manager [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Took 2.49 seconds to destroy the instance on the hypervisor. [ 978.966344] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 978.966900] env[68279]: DEBUG nova.compute.manager [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 978.966900] env[68279]: DEBUG nova.network.neutron [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 979.044877] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.158642] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.158642] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5260cf21-17fa-925c-7faf-c6f861244a87" [ 979.158642] env[68279]: _type = "HttpNfcLease" [ 979.158642] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 979.159207] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 979.159207] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5260cf21-17fa-925c-7faf-c6f861244a87" [ 979.159207] env[68279]: _type = "HttpNfcLease" [ 979.159207] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 979.159910] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0312511b-c74c-4670-86bf-2b84cad44134 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.168369] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 979.168555] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 979.239585] env[68279]: DEBUG oslo_vmware.api [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963442, 'name': PowerOnVM_Task, 'duration_secs': 0.605762} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.241338] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.241614] env[68279]: INFO nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Took 7.93 seconds to spawn the instance on the hypervisor. [ 979.241835] env[68279]: DEBUG nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.243215] env[68279]: DEBUG nova.compute.manager [req-3e5f9b12-2543-4497-8baa-96114c042cc7 req-31ae2505-fd3e-4b16-9b2b-ce9b87b6d754 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Received event network-vif-deleted-89c7e747-362e-4106-8694-7df62edf0bbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.243259] env[68279]: INFO nova.compute.manager [req-3e5f9b12-2543-4497-8baa-96114c042cc7 req-31ae2505-fd3e-4b16-9b2b-ce9b87b6d754 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Neutron deleted interface 89c7e747-362e-4106-8694-7df62edf0bbd; detaching it from the instance and deleting it from the info cache [ 979.243426] env[68279]: DEBUG nova.network.neutron [req-3e5f9b12-2543-4497-8baa-96114c042cc7 req-31ae2505-fd3e-4b16-9b2b-ce9b87b6d754 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.245509] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348f5f9c-ac24-4a19-9bf4-e895a53ad3a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.267328] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a173c025-0e26-4192-ad28-c7d0db4a95e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.305449] env[68279]: DEBUG nova.compute.manager [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.305708] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.306849] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d7d607-96ca-461c-a504-a3b377843e9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.315811] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.316080] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44543abe-2b0b-4017-907c-4cff766cbdc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.323163] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 979.323163] env[68279]: value = "task-2963445" [ 979.323163] env[68279]: _type = "Task" [ 979.323163] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.327636] env[68279]: DEBUG nova.scheduler.client.report [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.337833] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963445, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.403233] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.403470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.403654] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.405176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.733719] env[68279]: DEBUG nova.network.neutron [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.752166] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36971f74-4299-4f25-b95a-4e05ba0b70e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.764032] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc17722-353d-4679-b9d1-7807d3ab03f0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.784296] env[68279]: INFO nova.compute.manager [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Took 34.94 seconds to build instance. [ 979.811626] env[68279]: DEBUG nova.compute.manager [req-3e5f9b12-2543-4497-8baa-96114c042cc7 req-31ae2505-fd3e-4b16-9b2b-ce9b87b6d754 service nova] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Detach interface failed, port_id=89c7e747-362e-4106-8694-7df62edf0bbd, reason: Instance 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 979.835815] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.838018] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963445, 'name': PowerOffVM_Task, 'duration_secs': 0.216367} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.838714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.170s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.840125] env[68279]: INFO nova.compute.claims [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.842929] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.843178] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.843747] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6286619-3aba-482d-8191-b96b63aa6fb5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.871779] env[68279]: INFO nova.scheduler.client.report [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Deleted allocations for instance 97c3000f-a3d8-45c1-b0a4-12eb2b22b572 [ 979.924262] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.924262] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 
866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.924262] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore1] 866eb440-4fc9-4708-8a3b-b53f2be3f6c8 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.924505] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4828723-7ffc-49f3-85d2-fabc57844bf8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.933645] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 979.933645] env[68279]: value = "task-2963447" [ 979.933645] env[68279]: _type = "Task" [ 979.933645] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.943123] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963447, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.946163] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Volume attach. 
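The "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" lines above come from polling a vCenter task until it finishes. A simplified, illustrative polling loop; get_task_info and the state names are stand-ins, not the real oslo.vmware API surface:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info() is a hypothetical callable returning e.g.
    # {'state': 'running', 'progress': 14} for a vCenter task.
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('vCenter task failed: %s' % info.get('error'))
        # A real loop would log "Task: {...} progress is N%" here, as above.
        time.sleep(poll_interval)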
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 979.946633] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594679', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'name': 'volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'serial': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 979.947727] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdc0d1d-1b94-4a56-a79c-52712e47e196 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.968932] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11227016-1686-4a95-918d-c548a60a70f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.999016] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19/volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.999815] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fa198ba-40b0-4d56-9f02-e0fd72cdaf97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.022849] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 980.022849] env[68279]: value = "task-2963448" [ 980.022849] env[68279]: _type = "Task" [ 980.022849] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.033803] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963448, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.239171] env[68279]: INFO nova.compute.manager [-] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Took 1.27 seconds to deallocate network for instance. 
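The _attach_volume_vmdk entry above carries the connection_info dict that the following ReconfigVM_Task uses to attach the backing disk at "[datastore2] volume-<id>/volume-<id>.vmdk". A hedged helper that derives that path from a dict shaped like the logged one; the function name and datastore default are assumptions for illustration:

def vmdk_path_from_connection_info(connection_info, datastore='datastore2'):
    # Keys mirror the dict logged above; 'name' is 'volume-<volume_id>'.
    data = connection_info['data']
    name = data['name']
    path = '[%s] %s/%s.vmdk' % (datastore, name, name)
    return path, data['volume_id']

For the dict above this yields '[datastore2] volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19/volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19.vmdk', matching the disk path in the reconfigure entry.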
[ 980.288295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2deb667f-fd19-4ce4-bdec-12490e1a7a56 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.461s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.380662] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5aa916a9-bf4c-41a1-b7be-f359777c2830 tempest-ListImageFiltersTestJSON-670507249 tempest-ListImageFiltersTestJSON-670507249-project-member] Lock "97c3000f-a3d8-45c1-b0a4-12eb2b22b572" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.988s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.430819] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 980.432029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32cd285-791b-4f13-958a-aa839ab05351 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.443228] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 980.443474] env[68279]: ERROR oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk due to incomplete transfer. [ 980.446831] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1ef3ac61-f811-40fe-9b44-0297e8d21304 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.448299] env[68279]: DEBUG oslo_vmware.api [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261311} completed successfully. 
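The rw_handles sequence above checks the NFC lease state and, because the image transfer did not finish, aborts the lease instead of completing it. A sketch of that decision; complete and abort are hypothetical callables standing in for the HttpNfcLeaseComplete / HttpNfcLeaseAbort invocations in the log:

def release_lease(lease_state, transfer_complete, complete, abort):
    if lease_state == 'ready' and transfer_complete:
        complete()
    else:
        # Matches the log: lease still 'ready' but the transfer never finished,
        # so it is aborted "due to incomplete transfer".
        abort()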
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.448583] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.448872] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.449200] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.449326] env[68279]: INFO nova.compute.manager [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 980.449504] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.450160] env[68279]: DEBUG nova.compute.manager [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.450867] env[68279]: DEBUG nova.network.neutron [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.458708] env[68279]: DEBUG oslo_vmware.rw_handles [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cd4f8-b0f0-797b-105c-9bb67b840e4a/disk-0.vmdk. 
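Taken together, the entries above trace the hypervisor-side teardown order for instance 866eb440-...: power off, unregister, delete the instance's datastore directory, then deallocate networking. A schematic ordering sketch with placeholder callables rather than real signatures:

def destroy_instance(power_off, unregister_vm, delete_datastore_dir, deallocate_network):
    power_off()             # PowerOffVM_Task
    unregister_vm()         # VirtualMachine.UnregisterVM
    delete_datastore_dir()  # FileManager.DeleteDatastoreFile_Task on the instance folder
    deallocate_network()    # neutron deallocate_for_instance()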
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 980.459300] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Uploaded image b3d5811d-7a7e-4a27-a53f-44bd9304ade5 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 980.463102] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 980.463952] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b67a999e-613f-4420-a234-5ed6facb49f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.473468] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 980.473468] env[68279]: value = "task-2963449" [ 980.473468] env[68279]: _type = "Task" [ 980.473468] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.483801] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963449, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.548857] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963448, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.744559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.791026] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 980.988760] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963449, 'name': Destroy_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.032030] env[68279]: INFO nova.compute.manager [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Rebuilding instance [ 981.042378] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963448, 'name': ReconfigVM_Task, 'duration_secs': 0.684134} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.043596] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfigured VM instance instance-00000042 to attach disk [datastore2] volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19/volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.052464] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73cbb47a-ac44-4583-8e11-d622cd3d9fca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.080579] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 981.080579] env[68279]: value = "task-2963450" [ 981.080579] env[68279]: _type = "Task" [ 981.080579] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.100910] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963450, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.101459] env[68279]: DEBUG nova.compute.manager [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.104715] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c83130-8227-49a9-a9f6-069094d699e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.265635] env[68279]: DEBUG nova.network.neutron [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.311561] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.329749] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b67c26c-db4f-4c74-b9db-e394ebcd24c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.336829] env[68279]: DEBUG nova.compute.manager [req-07f53b9d-f9f5-4f73-95af-e7c74cb43551 req-7e07f452-0989-4c18-8892-1ecc948896aa service nova] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Received event network-vif-deleted-58be91e3-be6b-4118-8032-e40c6f5f099d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 981.340443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b29c9-ffa9-4710-91a5-d23326e83dd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.379394] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f272d1-74a1-41a1-bd0b-94aa5565db1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.388933] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380a7da2-b09b-4580-824f-2f022db2ed0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.404733] env[68279]: DEBUG nova.compute.provider_tree [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.487416] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963449, 'name': Destroy_Task, 'duration_secs': 0.639307} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.487711] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Destroyed the VM [ 981.487972] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 981.488361] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e3ff0290-381c-418c-b506-27a5e5323b80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.498019] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 981.498019] env[68279]: value = "task-2963451" [ 981.498019] env[68279]: _type = "Task" [ 981.498019] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.507743] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963451, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.596415] env[68279]: DEBUG oslo_vmware.api [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963450, 'name': ReconfigVM_Task, 'duration_secs': 0.177266} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.596759] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594679', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'name': 'volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'serial': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 981.767821] env[68279]: INFO nova.compute.manager [-] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Took 1.32 seconds to deallocate network for instance. 
[ 981.908672] env[68279]: DEBUG nova.scheduler.client.report [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.009685] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963451, 'name': RemoveSnapshot_Task, 'duration_secs': 0.367905} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.010287] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 982.010868] env[68279]: DEBUG nova.compute.manager [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.012675] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6297389f-f533-4629-b520-79a405f0d912 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.120718] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.122333] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dcea57c-ed54-4417-a1d2-8a090aee896b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.129477] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 982.129477] env[68279]: value = "task-2963452" [ 982.129477] env[68279]: _type = "Task" [ 982.129477] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.139145] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963452, 'name': PowerOffVM_Task} progress is 0%. 
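The inventory dict reported above maps to schedulable capacity with the usual Placement formula, usable = (total - reserved) * allocation_ratio. A worked check against the logged numbers:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable(resource_class):
    inv = inventory[resource_class]
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

# usable('VCPU') == 192.0, usable('MEMORY_MB') == 196078.0, usable('DISK_GB') == 400.0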
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.275694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.299481] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "f4963730-d516-48b7-a320-8af731831a30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.299724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.414535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.414988] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 982.419115] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.719s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.420654] env[68279]: INFO nova.compute.claims [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.526686] env[68279]: INFO nova.compute.manager [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Shelve offloading [ 982.640194] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963452, 'name': PowerOffVM_Task, 'duration_secs': 0.128141} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.640482] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.640721] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.641489] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad37ca1-9c9a-4f88-985d-1c14caa59186 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.644591] env[68279]: DEBUG nova.objects.instance [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'flavor' on Instance uuid c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.650595] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.650834] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00c8f25d-14c3-4ed6-8cc3-89bf37d64507 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.679079] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e 
tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.679425] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.679722] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Deleting the datastore file [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.680015] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc487647-413b-4d9f-ae38-70c232a1f488 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.687107] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 982.687107] env[68279]: value = "task-2963454" [ 982.687107] env[68279]: _type = "Task" [ 982.687107] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.696433] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.924833] env[68279]: DEBUG nova.compute.utils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 982.928764] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 982.928764] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.966321] env[68279]: DEBUG nova.policy [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '036de15261e24da98e0fbdb5fe7abd15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e051515a1294ec080edeaf52d79c8f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 983.031033] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.031033] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf44d578-565c-4bae-bab9-e2e393965bda {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.039646] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 983.039646] env[68279]: value = "task-2963455" [ 983.039646] env[68279]: _type = "Task" [ 983.039646] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.048371] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.151013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cc63c454-c731-4329-8a64-7691c1673241 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.341s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.199376] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091917} completed successfully. 
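The nova.policy entry above shows the network:attach_external_network check failing for a caller that only holds the member and reader roles. A simplified stand-in for that evaluation; the admin-role requirement written here is an assumption for illustration, the real rule lives in Nova's policy defaults:

creds = {'roles': ['member', 'reader'],
         'project_id': '9e051515a1294ec080edeaf52d79c8f0'}

def may_attach_external_network(credentials):
    # Stand-in for the oslo.policy evaluation of network:attach_external_network.
    return 'admin' in credentials.get('roles', [])

assert may_attach_external_network(creds) is False  # mirrors the failed check above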
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.199657] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 983.199847] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 983.201543] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.248480] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Successfully created port: d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.330918] env[68279]: INFO nova.compute.manager [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Rescuing [ 983.331328] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.331391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.331548] env[68279]: DEBUG nova.network.neutron [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.429534] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 983.553887] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 983.554104] env[68279]: DEBUG nova.compute.manager [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.555181] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf7ce85-159b-4ebc-9387-581d92d8128c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.562376] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.562553] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.562948] env[68279]: DEBUG nova.network.neutron [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.856592] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ded6479-7f20-49b6-a640-dd591266e368 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.866070] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c1fe0b-6deb-4c46-b48d-a25d9907bd57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.904501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc21f933-eef7-4806-b02d-f4b3b2ad4818 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.913551] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce867660-0fde-40ce-a6b3-f5d2884c2e1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.931547] env[68279]: DEBUG nova.compute.provider_tree [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed in ProviderTree for provider: 
40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.084702] env[68279]: DEBUG nova.network.neutron [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.897031] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Successfully updated port: d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.904961] env[68279]: DEBUG nova.scheduler.client.report [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.908830] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Start spawning the instance on the hypervisor. 
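The instance_info_cache entry above embeds the fixed IP and its floating IP inside nested VIF/subnet structures. A small helper (name is illustrative) that reads both out of a network_info list shaped like the cached entry:

def addresses(network_info):
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return fixed, floating

# For the cached entry above this returns (['192.168.128.5'], ['10.180.180.153']).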
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 984.910815] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.916021] env[68279]: DEBUG nova.compute.manager [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Received event network-vif-plugged-d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.916021] env[68279]: DEBUG oslo_concurrency.lockutils [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] Acquiring lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.916021] env[68279]: DEBUG oslo_concurrency.lockutils [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.916021] env[68279]: DEBUG oslo_concurrency.lockutils [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.916021] env[68279]: DEBUG nova.compute.manager [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] No waiting events found dispatching network-vif-plugged-d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.916021] env[68279]: WARNING nova.compute.manager [req-c3cc3bea-fe1e-4e73-a99b-0e389721a00c req-2d4a1c77-5153-4546-ada0-36033bf59f0c service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Received unexpected event network-vif-plugged-d95a0993-73f2-4ac5-a5bb-298e3b990150 for instance with vm_state building and task_state spawning. 
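The external_instance_event entries above take the per-instance "-events" lock, look for a registered waiter for network-vif-plugged-d95a0993-..., find none, and emit the "Received unexpected event" warning. A schematic of that dispatch; the dict and print are stand-ins for Nova's event bookkeeping and logger:

waiting_events = {}  # (instance_uuid, event_key) -> waiter, a stand-in registry

def pop_instance_event(instance_uuid, event_key):
    waiter = waiting_events.pop((instance_uuid, event_key), None)
    if waiter is None:
        # Corresponds to "No waiting events found dispatching ..." followed by
        # the "Received unexpected event ..." warning in the log above.
        print('WARNING: unexpected event %s for instance %s' % (event_key, instance_uuid))
    return waiter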
[ 984.916021] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.916021] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.916409] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 984.962235] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.962489] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.962649] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.962838] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.962983] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.963153] env[68279]: DEBUG nova.virt.hardware [None 
req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.963364] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.963524] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.963730] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.963903] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.964097] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.966666] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcfa47d-e88e-40f2-aa5f-df217306b45c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.981433] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818baf27-06b8-41e6-8561-9be4a1fc2773 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.989222] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.989479] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.989638] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.989815] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.989954] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.990110] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.990327] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.990498] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.990663] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.990822] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.990990] env[68279]: DEBUG nova.virt.hardware [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e 
tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.991810] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e283ce-e3ba-48d2-9468-847707e90668 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.010792] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af41e8c-93e2-413e-8d14-5175bcd32e4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.025831] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 985.031511] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 985.031803] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 985.032054] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bf5f7cf-dfa5-433d-9d76-00218913cd8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.051945] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 985.051945] env[68279]: value = "task-2963456" [ 985.051945] env[68279]: _type = "Task" [ 985.051945] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.061112] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963456, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.083185] env[68279]: DEBUG nova.network.neutron [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updating instance_info_cache with network_info: [{"id": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "address": "fa:16:3e:74:43:f8", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a76bc3-d8", "ovs_interfaceid": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.418529] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.999s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.419171] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 985.421991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.661s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.423484] env[68279]: INFO nova.compute.claims [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.457059] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.564708] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963456, 'name': CreateVM_Task, 'duration_secs': 0.354544} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.564884] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 985.565322] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.565498] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.565819] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 985.566096] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29bde028-e956-41d7-8b03-2dfd589d1a2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.572181] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 985.572181] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212fff8-3ba1-eb67-0b6e-d69825cd2bee" [ 985.572181] env[68279]: _type = "Task" [ 985.572181] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.583353] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212fff8-3ba1-eb67-0b6e-d69825cd2bee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.587797] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.623741] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Updating instance_info_cache with network_info: [{"id": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "address": "fa:16:3e:b7:c4:77", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95a0993-73", "ovs_interfaceid": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.844684] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.845645] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2ef159-9e22-46b5-8bd4-7f06c1234fbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.854230] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.854547] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fcd3451-bdad-4b31-ae05-a60bc8ab9af3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.921664] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Unregistered 
the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.922077] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.922212] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore2] 01404bab-6516-4783-8b9d-0738010b3e9b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.922523] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8ab6bb9-d09a-490a-9519-a383cbaa66cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.930812] env[68279]: DEBUG nova.compute.utils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 985.934310] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 985.934310] env[68279]: value = "task-2963458" [ 985.934310] env[68279]: _type = "Task" [ 985.934310] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.934837] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 985.935040] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.948206] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.978272] env[68279]: DEBUG nova.policy [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '036de15261e24da98e0fbdb5fe7abd15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e051515a1294ec080edeaf52d79c8f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.086803] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212fff8-3ba1-eb67-0b6e-d69825cd2bee, 'name': SearchDatastore_Task, 'duration_secs': 0.01281} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.087124] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.087367] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 986.087606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.087751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.087924] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 986.088226] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60ea1759-f402-43c0-9d88-0c7afcc113c4 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.097429] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 986.097632] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 986.098383] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a77f743-2db6-406d-b183-1fba6f0077d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.104666] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 986.104666] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528271a5-a71b-a341-7643-272023a1dd9f" [ 986.104666] env[68279]: _type = "Task" [ 986.104666] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.113315] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528271a5-a71b-a341-7643-272023a1dd9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.126474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.126821] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Instance network_info: |[{"id": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "address": "fa:16:3e:b7:c4:77", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95a0993-73", "ovs_interfaceid": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.127263] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:c4:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cb94a1a-f287-46e7-b63b-ec692c2141b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd95a0993-73f2-4ac5-a5bb-298e3b990150', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.134811] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Creating folder: Project (9e051515a1294ec080edeaf52d79c8f0). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 986.135286] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76b45f07-156e-4f34-9dfe-f79fb84572d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.150978] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Created folder: Project (9e051515a1294ec080edeaf52d79c8f0) in parent group-v594445. [ 986.151199] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Creating folder: Instances. Parent ref: group-v594681. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 986.151433] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5e99f76-4911-4c97-863b-cf184ec5b129 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.162569] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Created folder: Instances in parent group-v594681. [ 986.162843] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.163079] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.163295] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f225394c-d594-480c-8550-08ce8c57f48e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.184294] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.184294] env[68279]: value = "task-2963461" [ 986.184294] env[68279]: _type = "Task" [ 986.184294] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.194152] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963461, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.239611] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Successfully created port: 7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.338609] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 986.339633] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591ea05a-a156-49d4-85f2-c14e01d61d44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.347570] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 986.348996] env[68279]: ERROR oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk due to incomplete transfer. [ 986.348996] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b02b89d8-5961-488c-85df-0f960061d257 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.357141] env[68279]: DEBUG oslo_vmware.rw_handles [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bac162-4ba0-542f-c9fa-8a0aab02d7d1/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 986.357529] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Uploaded image c23360f2-77b9-4b89-9c6d-1aafb5e74c2f to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 986.359445] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 986.359726] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d3e3c667-5143-4dbb-8ee2-1c9f0203a466 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.368523] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 986.368523] env[68279]: value = "task-2963462" [ 986.368523] env[68279]: _type = "Task" [ 986.368523] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.378384] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963462, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.436258] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 986.464530] env[68279]: DEBUG oslo_vmware.api [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151621} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.464813] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.465826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.465826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.485565] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.486163] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4111a47e-b3ee-477b-9d4d-27e550d7729a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.494682] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 986.494682] env[68279]: value = "task-2963463" [ 986.494682] env[68279]: _type = "Task" [ 986.494682] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.499172] env[68279]: INFO nova.scheduler.client.report [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance 01404bab-6516-4783-8b9d-0738010b3e9b [ 986.526930] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.619114] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528271a5-a71b-a341-7643-272023a1dd9f, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.622624] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4916b8ca-0963-477a-8629-549ee20317a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.629078] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 986.629078] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be5170-59ce-ff43-07b5-7b5b6e72e8e9" [ 986.629078] env[68279]: _type = "Task" [ 986.629078] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.640316] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be5170-59ce-ff43-07b5-7b5b6e72e8e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.695568] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963461, 'name': CreateVM_Task, 'duration_secs': 0.402003} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.698279] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.699211] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.699496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.699739] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 986.700014] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd3c10c6-89e0-4cab-9e10-04e0a3f49eb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.706638] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 
986.706638] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524d3817-aa4c-268b-b476-470f37b2931c" [ 986.706638] env[68279]: _type = "Task" [ 986.706638] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.716449] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524d3817-aa4c-268b-b476-470f37b2931c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.742785] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Received event network-changed-d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.743027] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Refreshing instance network info cache due to event network-changed-d95a0993-73f2-4ac5-a5bb-298e3b990150. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 986.743194] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Acquiring lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.743340] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Acquired lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.743498] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Refreshing network info cache for port d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.880478] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963462, 'name': Destroy_Task, 'duration_secs': 0.440354} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.883472] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Destroyed the VM [ 986.883757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 986.884247] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d333857b-c352-420b-ac2c-4a0bb2b0d407 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.897000] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51769825-480e-4918-b5aa-697763c7eb5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.901505] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 986.901505] env[68279]: value = "task-2963464" [ 986.901505] env[68279]: _type = "Task" [ 986.901505] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.908435] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bd2bae-d5c1-4d0f-82c7-491be440114d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.915308] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963464, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.947395] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1b8f3-642b-4686-a454-2fd78971886a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.956152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64cceed-3df9-446e-9e6e-c7c33162f9b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.970200] env[68279]: DEBUG nova.compute.provider_tree [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.004793] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963463, 'name': PowerOffVM_Task, 'duration_secs': 0.249025} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.005090] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.005856] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a6f0fe-d798-4ffc-a939-0f4ec269cc1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.028420] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.029632] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2da7cd1-11d6-44d2-9c7c-7d5e7193c5a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.058965] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.059293] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-835f8ac8-c49b-46a6-8dd4-b70c86f88802 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.067313] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 
tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 987.067313] env[68279]: value = "task-2963465" [ 987.067313] env[68279]: _type = "Task" [ 987.067313] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.075469] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.140667] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52be5170-59ce-ff43-07b5-7b5b6e72e8e9, 'name': SearchDatastore_Task, 'duration_secs': 0.015545} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.140938] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.141223] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 987.141488] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcf72822-edc4-426f-b05c-d8af026fa9e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.148283] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 987.148283] env[68279]: value = "task-2963466" [ 987.148283] env[68279]: _type = "Task" [ 987.148283] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.156670] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963466, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.216845] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524d3817-aa4c-268b-b476-470f37b2931c, 'name': SearchDatastore_Task, 'duration_secs': 0.019701} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.217267] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.217547] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.217893] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.218137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.218389] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.218713] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-609fa2e3-9ab2-4763-a534-d74caddd43d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.228951] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.229147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.230047] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72659f95-c849-465e-873a-ae2b858c3c8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.235687] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 987.235687] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3d362-6dcd-4a84-5f5f-46928041d625" [ 987.235687] env[68279]: _type = "Task" [ 987.235687] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.243783] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3d362-6dcd-4a84-5f5f-46928041d625, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.416257] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963464, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.451964] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 987.464177] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Updated VIF entry in instance network info cache for port d95a0993-73f2-4ac5-a5bb-298e3b990150. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.465033] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Updating instance_info_cache with network_info: [{"id": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "address": "fa:16:3e:b7:c4:77", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95a0993-73", "ovs_interfaceid": "d95a0993-73f2-4ac5-a5bb-298e3b990150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.473414] env[68279]: DEBUG nova.scheduler.client.report [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.479888] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.480170] env[68279]: DEBUG nova.virt.hardware [None 
req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.480323] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.480502] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.480647] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.480794] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.480997] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.481172] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.481339] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.481503] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.481703] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.483011] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578725c2-1fa6-4338-ba2a-027b2e729ece {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.496964] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d0b9f7-c17a-48b6-bf93-3de7b36c0d4c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.583162] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 987.583537] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.583897] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.584334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.584514] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.584984] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac0150c5-80de-41b9-a917-90d5eb551415 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.605546] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.605546] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.605546] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94d73ca5-d73c-47dd-aeb0-587d07c51cdc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.621026] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 987.621026] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527d143f-868d-8578-84e9-cecc94e3dfbe" [ 987.621026] env[68279]: _type = "Task" [ 987.621026] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.633387] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527d143f-868d-8578-84e9-cecc94e3dfbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.667871] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963466, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.747976] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b3d362-6dcd-4a84-5f5f-46928041d625, 'name': SearchDatastore_Task, 'duration_secs': 0.010452} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.748226] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecbda413-ffe1-4e6b-a9ea-7f8658c77a74 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.754560] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 987.754560] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52499401-017b-b08a-d50e-a5f0d4a7aa8f" [ 987.754560] env[68279]: _type = "Task" [ 987.754560] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.763540] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52499401-017b-b08a-d50e-a5f0d4a7aa8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.913328] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963464, 'name': RemoveSnapshot_Task, 'duration_secs': 0.792734} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.913608] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 987.913903] env[68279]: DEBUG nova.compute.manager [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.914652] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69b8645-a5cf-417c-bc21-36e9e1937149 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.925175] env[68279]: DEBUG nova.compute.manager [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Received event network-vif-plugged-7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 987.925175] env[68279]: DEBUG oslo_concurrency.lockutils [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] Acquiring lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.925175] env[68279]: DEBUG oslo_concurrency.lockutils [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] Lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.925417] env[68279]: DEBUG oslo_concurrency.lockutils [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] Lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.925417] env[68279]: DEBUG nova.compute.manager [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] No waiting events found dispatching network-vif-plugged-7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.925564] env[68279]: WARNING nova.compute.manager [req-70aa3e84-69d9-47a5-94ba-b6edf2bd905b 
req-bbec1c11-791b-4a1f-b60f-dd8c172e577b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Received unexpected event network-vif-plugged-7aacc52c-518b-4424-b4a3-686a01b73bd9 for instance with vm_state building and task_state spawning. [ 987.967393] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Releasing lock "refresh_cache-0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.967714] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received event network-vif-unplugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 987.967836] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.968046] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.968267] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.968411] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] No waiting events found dispatching network-vif-unplugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.968593] env[68279]: WARNING nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received unexpected event network-vif-unplugged-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 for instance with vm_state shelved_offloaded and task_state None. [ 987.968798] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Received event network-changed-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 987.968969] env[68279]: DEBUG nova.compute.manager [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Refreshing instance network info cache due to event network-changed-f8a76bc3-d8c4-428b-9e5e-3f8b55802c07. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 987.969166] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Acquiring lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.969345] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Acquired lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.969511] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Refreshing network info cache for port f8a76bc3-d8c4-428b-9e5e-3f8b55802c07 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.978279] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.978774] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.981437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.307s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.982842] env[68279]: INFO nova.compute.claims [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.130718] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527d143f-868d-8578-84e9-cecc94e3dfbe, 'name': SearchDatastore_Task, 'duration_secs': 0.038612} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.131526] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f78ca90d-1946-4126-875f-76618baa1b30 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.137192] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 988.137192] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a10d2a-475f-f524-461c-78ac5e3fe247" [ 988.137192] env[68279]: _type = "Task" [ 988.137192] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.145247] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a10d2a-475f-f524-461c-78ac5e3fe247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.157636] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630576} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.157867] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.158089] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.158318] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a34b085-d9dd-42ae-a550-cab5ff37f1e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.165202] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 988.165202] env[68279]: value = "task-2963467" [ 988.165202] env[68279]: _type = "Task" [ 988.165202] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.174540] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963467, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.265578] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52499401-017b-b08a-d50e-a5f0d4a7aa8f, 'name': SearchDatastore_Task, 'duration_secs': 0.033263} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.265865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.266159] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9/0d8f8797-649e-45de-8b3c-0b47e1d4cdd9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.266420] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32433f91-90f6-4292-89fe-5c66432cda34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.276534] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 988.276534] env[68279]: value = "task-2963468" [ 988.276534] env[68279]: _type = "Task" [ 988.276534] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.284796] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963468, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.306236] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Successfully updated port: 7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.371379] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.429167] env[68279]: INFO nova.compute.manager [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Shelve offloading [ 988.489986] env[68279]: DEBUG nova.compute.utils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 988.491703] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 988.491897] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 988.530646] env[68279]: DEBUG nova.policy [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '036de15261e24da98e0fbdb5fe7abd15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e051515a1294ec080edeaf52d79c8f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 988.654397] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a10d2a-475f-f524-461c-78ac5e3fe247, 'name': SearchDatastore_Task, 'duration_secs': 0.010429} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.654693] env[68279]: DEBUG oslo_concurrency.lockutils [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.654958] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. {{(pid=68279) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 988.660021] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80115604-0d9c-4de2-8c4e-589ab6899e82 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.671278] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 988.671278] env[68279]: value = "task-2963469" [ 988.671278] env[68279]: _type = "Task" [ 988.671278] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.683712] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180277} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.685939] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.690154] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01d2826-3170-421a-a6fe-f7786c94b941 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.696016] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.698059] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updated VIF entry in instance network info cache for port f8a76bc3-d8c4-428b-9e5e-3f8b55802c07. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.698479] env[68279]: DEBUG nova.network.neutron [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updating instance_info_cache with network_info: [{"id": "f8a76bc3-d8c4-428b-9e5e-3f8b55802c07", "address": "fa:16:3e:74:43:f8", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": null, "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf8a76bc3-d8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.717477] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.718221] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-810dfb5c-e0d1-4481-9e51-78e06dd95399 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.742034] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 988.742034] env[68279]: value = "task-2963470" [ 988.742034] env[68279]: _type = "Task" [ 988.742034] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.754394] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963470, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.789018] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963468, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.808888] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.809102] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.809344] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.908076] env[68279]: DEBUG nova.compute.manager [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Received event network-changed-7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.908076] env[68279]: DEBUG nova.compute.manager [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Refreshing instance network info cache due to event network-changed-7aacc52c-518b-4424-b4a3-686a01b73bd9. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 988.908076] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] Acquiring lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.933234] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.933234] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3540389a-8099-4f0e-a429-f149277b2cef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.946197] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 988.946197] env[68279]: value = "task-2963471" [ 988.946197] env[68279]: _type = "Task" [ 988.946197] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.958133] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 988.958133] env[68279]: DEBUG nova.compute.manager [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 988.958133] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49ef253-55ed-44fd-b95b-80d0678c5062 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.968842] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.968842] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.968842] env[68279]: DEBUG nova.network.neutron [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.995279] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 989.063030] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Successfully created port: ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 989.187039] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963469, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.218922] env[68279]: DEBUG oslo_concurrency.lockutils [req-2f9a5019-61bd-45c3-8547-fb1942d14571 req-392d9035-7953-425f-b049-bd7f20ab49df service nova] Releasing lock "refresh_cache-01404bab-6516-4783-8b9d-0738010b3e9b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.255916] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963470, 'name': ReconfigVM_Task, 'duration_secs': 0.356395} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.256228] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9/99024851-0add-44b9-a70a-2e242180d6a9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.256874] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c1993e9-21fd-48c8-a171-99be0fa3b001 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.264570] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 989.264570] env[68279]: value = "task-2963472" [ 989.264570] env[68279]: _type = "Task" [ 989.264570] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.273605] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963472, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.287224] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524545} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.289842] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9/0d8f8797-649e-45de-8b3c-0b47e1d4cdd9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.290078] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.290555] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb00dc77-bb8a-407d-97e0-27065782e44c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.298377] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 989.298377] env[68279]: value = "task-2963473" [ 989.298377] env[68279]: _type = "Task" [ 989.298377] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.309419] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963473, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.380386] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.468018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dad651-53f2-4a36-b1bd-1745409fa3ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.477944] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51364d52-7959-4ab8-9807-32a919e1c29b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.522439] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66052113-6ae1-45a1-a6e8-af40d63ebe21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.531125] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65842f12-f92c-40fa-b56a-8e6871839088 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.549085] env[68279]: DEBUG nova.compute.provider_tree [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.682911] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577074} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.683147] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk. 
[ 989.683995] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef342de-0eef-4d80-97a4-d3fb9b2c850d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.711811] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.712091] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c815549-0a1c-4249-b81b-1806a1df629d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.730491] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 989.730491] env[68279]: value = "task-2963474" [ 989.730491] env[68279]: _type = "Task" [ 989.730491] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.740251] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.776783] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963472, 'name': Rename_Task, 'duration_secs': 0.198112} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.777086] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.777367] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8001b9b-34d9-4e49-aad7-5462016301ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.784550] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 989.784550] env[68279]: value = "task-2963475" [ 989.784550] env[68279]: _type = "Task" [ 989.784550] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.794946] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.807709] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102579} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.808014] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.808838] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7bdb95-09df-41d4-988e-43cd7e0a6adc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.839115] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9/0d8f8797-649e-45de-8b3c-0b47e1d4cdd9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.840258] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Updating instance_info_cache with network_info: [{"id": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "address": "fa:16:3e:8a:f6:b8", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aacc52c-51", "ovs_interfaceid": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.841505] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebd46a68-38a2-4481-800e-d7d08d75c40c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.857238] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.857564] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Instance network_info: |[{"id": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "address": "fa:16:3e:8a:f6:b8", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aacc52c-51", "ovs_interfaceid": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.860429] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] Acquired lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.860645] env[68279]: DEBUG nova.network.neutron [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Refreshing network info cache for port 7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.861836] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:f6:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cb94a1a-f287-46e7-b63b-ec692c2141b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7aacc52c-518b-4424-b4a3-686a01b73bd9', 'vif_model': 'vmxnet3'}] 
{{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.869439] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.873440] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.874440] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25f88d5b-7ea5-4285-8393-9da555b541fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.893555] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 989.893555] env[68279]: value = "task-2963476" [ 989.893555] env[68279]: _type = "Task" [ 989.893555] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.900488] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.900488] env[68279]: value = "task-2963477" [ 989.900488] env[68279]: _type = "Task" [ 989.900488] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.904236] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.913638] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963477, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.024606] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 990.044759] env[68279]: DEBUG nova.network.neutron [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.048189] env[68279]: DEBUG nova.scheduler.client.report [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.064295] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.065015] env[68279]: DEBUG nova.virt.hardware [None 
req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.065015] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.065015] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.065223] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 990.065223] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.065573] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.065573] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.065797] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.065936] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.066141] env[68279]: DEBUG nova.virt.hardware [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.067858] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575b09d2-c339-406f-9ae6-9dfbef33d7f1 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.078389] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eec0ec-bd46-4bd0-8e9e-a5b7bbb99f73 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.175539] env[68279]: DEBUG nova.network.neutron [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Updated VIF entry in instance network info cache for port 7aacc52c-518b-4424-b4a3-686a01b73bd9. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.175962] env[68279]: DEBUG nova.network.neutron [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Updating instance_info_cache with network_info: [{"id": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "address": "fa:16:3e:8a:f6:b8", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7aacc52c-51", "ovs_interfaceid": "7aacc52c-518b-4424-b4a3-686a01b73bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.242713] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963474, 'name': ReconfigVM_Task, 'duration_secs': 0.388094} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.243012] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfigured VM instance instance-00000042 to attach disk [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5/01e502b7-2447-4972-9fe7-fd69f76ef71f-rescue.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.244042] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c328f28f-bef7-4bb8-90b2-085cd6c3b88c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.280667] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be80586a-0c12-45af-b657-40b45aefbd31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.301131] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963475, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.302571] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 990.302571] env[68279]: value = "task-2963478" [ 990.302571] env[68279]: _type = "Task" [ 990.302571] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.310989] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.406201] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963476, 'name': ReconfigVM_Task, 'duration_secs': 0.340423} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.409685] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9/0d8f8797-649e-45de-8b3c-0b47e1d4cdd9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.410318] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a5d8f68-9fce-406d-992b-9af672b6abb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.417968] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963477, 'name': CreateVM_Task, 'duration_secs': 0.448654} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.419067] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.419414] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 990.419414] env[68279]: value = "task-2963479" [ 990.419414] env[68279]: _type = "Task" [ 990.419414] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.420038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.420204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.420517] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.420802] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4f6f8f3-029c-4bee-83ed-9cc2a589acb7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.431713] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] 
Waiting for the task: (returnval){ [ 990.431713] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d2b614-3033-8bc2-ba13-91dae1912c19" [ 990.431713] env[68279]: _type = "Task" [ 990.431713] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.434650] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963479, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.442566] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d2b614-3033-8bc2-ba13-91dae1912c19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.546132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.556206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.556735] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 990.559659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.767s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.561513] env[68279]: INFO nova.compute.claims [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.679084] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0e27e9b-37ee-4217-a1fb-767b03b4238f req-0ecf0570-41a6-469e-b0d5-b6295ef9f10b service nova] Releasing lock "refresh_cache-e6f39528-384c-456b-8155-a6856bab3ce0" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.680495] env[68279]: DEBUG nova.compute.manager [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Received event network-vif-plugged-ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.680730] env[68279]: DEBUG oslo_concurrency.lockutils [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] Acquiring lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.680947] env[68279]: DEBUG oslo_concurrency.lockutils [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.681290] env[68279]: DEBUG oslo_concurrency.lockutils [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.681483] env[68279]: DEBUG nova.compute.manager [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] No waiting events found dispatching network-vif-plugged-ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 990.681654] env[68279]: WARNING nova.compute.manager [req-513a0f9b-6df6-4dc6-9ea0-8ebebc0589ad req-97079201-3323-405c-b040-88e69b990c0a service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Received unexpected event network-vif-plugged-ec6a474d-d634-499e-9236-c78e24a8792f for instance with vm_state building and task_state spawning. 
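Editor's note: the event entries just above ("Received event network-vif-plugged-...", "No waiting events found dispatching ...", and the "Received unexpected event" WARNING) reflect the waiter/dispatch pattern used for external instance events: a waiter is registered before the operation that will trigger the event, and the later notification pops and signals it; if no waiter was registered, the warning is logged instead. The sketch below is a simplified, self-contained model of that pattern; the class and method names echo the log but are illustrative, not Nova's actual implementation (which lives in nova.compute.manager).

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Simplified model of the waiter/dispatch pattern behind the
        'No waiting events found dispatching ...' and
        'Received unexpected event ...' log lines; illustrative only.
        """

        def __init__(self):
            self._lock = threading.Lock()
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, event_name):
            # Register a waiter before the operation that will trigger the
            # event (e.g. plugging a VIF), so a later dispatch finds it.
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                # Mirrors the WARNING: the event arrived but nothing was waiting.
                print(f"Received unexpected event {event_name} for {instance_uuid}")
            else:
                waiter.set()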
[ 990.759854] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Successfully updated port: ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.802421] env[68279]: DEBUG oslo_vmware.api [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963475, 'name': PowerOnVM_Task, 'duration_secs': 0.559171} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.803489] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.803782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.803979] env[68279]: DEBUG nova.compute.manager [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.805166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a87b32-fad1-4c83-a504-350f7bb73e62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.808585] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77e7efe-9fed-4adc-949f-80f86e384be0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.825223] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963478, 'name': ReconfigVM_Task, 'duration_secs': 0.182734} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.825480] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.825937] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.826152] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ba7f32b-84e4-4519-bd3a-f0804c48eaa8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.827524] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37a8e233-5a5d-4f9d-9d13-9af66a8d2b1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.834984] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 990.834984] env[68279]: value = "task-2963481" [ 990.834984] env[68279]: _type = "Task" [ 990.834984] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.844108] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.895662] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.895953] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.896189] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleting the datastore file [datastore1] daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.896508] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f03aeb5-341b-4547-afbb-6bb764a6de24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.905209] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 990.905209] env[68279]: value = "task-2963482" [ 990.905209] env[68279]: _type = "Task" [ 990.905209] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.915143] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.932868] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963479, 'name': Rename_Task, 'duration_secs': 0.155929} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.933196] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.933985] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37afbecf-db6b-4f25-ae69-bf321dd485c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.947981] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d2b614-3033-8bc2-ba13-91dae1912c19, 'name': SearchDatastore_Task, 'duration_secs': 0.010775} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.951440] env[68279]: DEBUG nova.compute.manager [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-vif-unplugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.951662] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.951878] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.952061] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.952234] env[68279]: DEBUG nova.compute.manager [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] No waiting events found dispatching network-vif-unplugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 990.952404] env[68279]: WARNING nova.compute.manager [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received unexpected event network-vif-unplugged-a047ea62-0c74-4967-820e-75553a4d8d7c for instance with vm_state shelved and task_state shelving_offloading. 
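Editor's note: the instance_info_cache entries in this section are plain network_info lists (port id, MAC, and a nested network/subnets/ips structure), and after the unplug event above the refreshed entry for port a047ea62-0c74-4967-820e-75553a4d8d7c comes back further below with type "unbound" and no bridge. The helper here is an illustrative, self-contained way to walk such a structure and pull out fixed IPs and bound state; it is not part of Nova, and the sample dict is trimmed from the logged entry.

    def summarize_network_info(network_info):
        """Summarize a network_info list shaped like the logged cache entries.

        Each element is a VIF dict with 'id', 'address', 'type' and a nested
        'network' -> 'subnets' -> 'ips' structure; this helper is illustrative.
        """
        summary = []
        for vif in network_info:
            fixed_ips = [
                ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"
            ]
            summary.append({
                "port_id": vif["id"],
                "mac": vif["address"],
                "fixed_ips": fixed_ips,
                # After a shelve-offload the port is reported as 'unbound'
                # with no bridge, as in the refreshed cache entry below.
                "bound": vif.get("type") != "unbound"
                         and vif["network"].get("bridge") is not None,
            })
        return summary

    # Example trimmed from the logged entry for port a047ea62-0c74-4967-820e-75553a4d8d7c:
    vif = {
        "id": "a047ea62-0c74-4967-820e-75553a4d8d7c",
        "address": "fa:16:3e:77:3b:51",
        "type": "unbound",
        "network": {
            "bridge": None,
            "subnets": [{"ips": [{"address": "192.168.128.3", "type": "fixed"}]}],
        },
    }
    print(summarize_network_info([vif]))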
[ 990.952559] env[68279]: DEBUG nova.compute.manager [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.952707] env[68279]: DEBUG nova.compute.manager [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing instance network info cache due to event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.952882] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.953025] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.953186] env[68279]: DEBUG nova.network.neutron [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.954827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.956232] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.956502] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.956675] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.956902] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.957663] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 990.957663] env[68279]: value = "task-2963483" [ 990.957663] env[68279]: _type = "Task" [ 990.957663] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.958974] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7de3ade0-0e1f-4f69-b3ed-27c9a980f20d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.972675] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963483, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.974128] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.974367] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.975534] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87ac3fae-560c-4357-8c4a-1f4292a7785d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.983543] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 990.983543] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5244cc48-1e79-2f5c-7407-e4d9f35a8831" [ 990.983543] env[68279]: _type = "Task" [ 990.983543] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.992641] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5244cc48-1e79-2f5c-7407-e4d9f35a8831, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.067038] env[68279]: DEBUG nova.compute.utils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 991.069608] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 991.069775] env[68279]: DEBUG nova.network.neutron [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 991.116331] env[68279]: DEBUG nova.policy [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6dcff6c11546f9b0907917a2463755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbad607de614a809c51668c2ac0d012', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 991.262391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.262603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.262753] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.333083] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.352984] env[68279]: DEBUG oslo_vmware.api [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 
tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963481, 'name': PowerOnVM_Task, 'duration_secs': 0.482583} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.352984] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.355055] env[68279]: DEBUG nova.compute.manager [None req-16dc6acb-8571-44e4-9f63-b8c3296f4893 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.355834] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea91eef-fd69-4428-ace4-3d76e65a7eb4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.417255] env[68279]: DEBUG oslo_vmware.api [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144233} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.417537] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.417727] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.417898] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.446755] env[68279]: INFO nova.scheduler.client.report [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted allocations for instance daccaa30-1011-4c7d-a668-05f9329ab4d5 [ 991.472766] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963483, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.473694] env[68279]: DEBUG nova.network.neutron [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Successfully created port: e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.494994] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5244cc48-1e79-2f5c-7407-e4d9f35a8831, 'name': SearchDatastore_Task, 'duration_secs': 0.009754} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.495916] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0cbbcc4-bdc2-4ccb-a8bb-44e695287e81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.506897] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 991.506897] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5221d2c0-4e91-c970-1e5e-eddb7836e585" [ 991.506897] env[68279]: _type = "Task" [ 991.506897] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.515368] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5221d2c0-4e91-c970-1e5e-eddb7836e585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.570989] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 991.782136] env[68279]: DEBUG nova.network.neutron [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updated VIF entry in instance network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.782136] env[68279]: DEBUG nova.network.neutron [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa047ea62-0c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.800656] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.952695] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.969811] env[68279]: DEBUG nova.network.neutron [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Updating instance_info_cache with network_info: [{"id": "ec6a474d-d634-499e-9236-c78e24a8792f", "address": "fa:16:3e:19:96:2d", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6a474d-d6", "ovs_interfaceid": "ec6a474d-d634-499e-9236-c78e24a8792f", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.977476] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963483, 'name': PowerOnVM_Task, 'duration_secs': 0.569102} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.977731] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.977921] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Took 7.07 seconds to spawn the instance on the hypervisor. [ 991.978105] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.978875] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c558a6ca-1836-42c5-b2ac-a1d8cbd319c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.983339] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed0d585-9347-4c98-b937-3383bd625dc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.997139] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e687b6-b411-4147-908a-1f487ef9c89d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.035056] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745e2846-f1a8-43cd-aebf-e6c464b74b62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.048628] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fb4eb4-94ec-4bc6-97a3-a6fccf372476 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.053327] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5221d2c0-4e91-c970-1e5e-eddb7836e585, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.053587] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.053840] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e6f39528-384c-456b-8155-a6856bab3ce0/e6f39528-384c-456b-8155-a6856bab3ce0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.054480] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ec434dd-8985-4f37-9e73-1765826ed045 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.065166] env[68279]: DEBUG nova.compute.provider_tree [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.067712] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 992.067712] env[68279]: value = "task-2963484" [ 992.067712] env[68279]: _type = "Task" [ 992.067712] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.080371] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.284829] env[68279]: DEBUG oslo_concurrency.lockutils [req-aab035b8-665b-4ef7-b1cb-8714ba1e77ac req-a49ca6df-521e-4b65-a124-f1519f845fa9 service nova] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.342939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "99024851-0add-44b9-a70a-2e242180d6a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.343322] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.343616] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "99024851-0add-44b9-a70a-2e242180d6a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.343827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.344112] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.346840] env[68279]: INFO nova.compute.manager [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Terminating instance [ 992.472645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.473027] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] 
[instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Instance network_info: |[{"id": "ec6a474d-d634-499e-9236-c78e24a8792f", "address": "fa:16:3e:19:96:2d", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6a474d-d6", "ovs_interfaceid": "ec6a474d-d634-499e-9236-c78e24a8792f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 992.473780] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:96:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cb94a1a-f287-46e7-b63b-ec692c2141b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec6a474d-d634-499e-9236-c78e24a8792f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.485346] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.485788] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.486026] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b6b0c4b-76b1-44cd-948a-02c36073063c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.514266] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.514266] env[68279]: value = "task-2963485" [ 992.514266] env[68279]: _type = "Task" [ 992.514266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.515361] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Took 24.88 seconds to build instance. 
[ 992.525275] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963485, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.570268] env[68279]: DEBUG nova.scheduler.client.report [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.587037] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 992.589130] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478485} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.590284] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e6f39528-384c-456b-8155-a6856bab3ce0/e6f39528-384c-456b-8155-a6856bab3ce0.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.590284] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.590284] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-061014ca-105b-405e-89a8-f8e888a477bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.599304] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 992.599304] env[68279]: value = "task-2963486" [ 992.599304] env[68279]: _type = "Task" [ 992.599304] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.612848] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.625353] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.625680] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.625847] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.626046] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.626241] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.626427] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.626658] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.626833] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac 
tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.627029] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.627258] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.627433] env[68279]: DEBUG nova.virt.hardware [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.628379] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6619ceb9-8ca3-48c6-886c-ea1b29e99d88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.638243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e76a6d-abd6-4128-b386-629099b2c25b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.841851] env[68279]: DEBUG nova.compute.manager [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Received event network-changed-ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.842091] env[68279]: DEBUG nova.compute.manager [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Refreshing instance network info cache due to event network-changed-ec6a474d-d634-499e-9236-c78e24a8792f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 992.842308] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] Acquiring lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.842396] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] Acquired lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.842558] env[68279]: DEBUG nova.network.neutron [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Refreshing network info cache for port ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.852312] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "refresh_cache-99024851-0add-44b9-a70a-2e242180d6a9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.852398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquired lock "refresh_cache-99024851-0add-44b9-a70a-2e242180d6a9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.852548] env[68279]: DEBUG nova.network.neutron [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.000940] env[68279]: DEBUG nova.network.neutron [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Successfully updated port: e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.022734] env[68279]: DEBUG nova.compute.manager [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Received event network-vif-plugged-e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.022990] env[68279]: DEBUG oslo_concurrency.lockutils [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] Acquiring lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.023254] env[68279]: DEBUG oslo_concurrency.lockutils [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.023500] env[68279]: DEBUG oslo_concurrency.lockutils [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.023715] env[68279]: DEBUG nova.compute.manager [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] No waiting events found dispatching network-vif-plugged-e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 993.023926] env[68279]: WARNING nova.compute.manager [req-e40e4119-3339-4b5a-b873-52c444420bde req-377cacb2-16e5-4504-a6df-dd2c0fab3f43 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Received unexpected event network-vif-plugged-e77b7975-dc57-4094-9c2d-546c6bce9653 for instance with vm_state building and task_state spawning. [ 993.024630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.399s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.034484] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963485, 'name': CreateVM_Task, 'duration_secs': 0.506801} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.034484] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.034713] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.034865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.035247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 993.035551] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0dc066b-a26c-496f-968f-63f997b75f90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.042266] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 993.042266] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f5dd06-877e-107f-1391-484c6d158280" [ 993.042266] env[68279]: _type = "Task" [ 993.042266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.053036] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f5dd06-877e-107f-1391-484c6d158280, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.078909] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.079550] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.082751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.238s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.084208] env[68279]: INFO nova.compute.claims [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.110359] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070571} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.110757] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.111583] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ad8221-7bc3-4615-9221-c3c014f71f2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.136890] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] e6f39528-384c-456b-8155-a6856bab3ce0/e6f39528-384c-456b-8155-a6856bab3ce0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.137487] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5609319-c69b-48b1-a483-acd91c2cd182 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.159266] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 993.159266] env[68279]: value = "task-2963487" [ 993.159266] env[68279]: _type = "Task" [ 993.159266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.167825] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963487, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.282020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.378327] env[68279]: DEBUG nova.network.neutron [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.406054] env[68279]: INFO nova.compute.manager [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Unrescuing [ 993.406355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.406509] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.406671] env[68279]: DEBUG nova.network.neutron [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.455131] env[68279]: DEBUG nova.network.neutron [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.503652] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.503804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.503957] env[68279]: DEBUG nova.network.neutron [None 
req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.527891] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 993.554856] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f5dd06-877e-107f-1391-484c6d158280, 'name': SearchDatastore_Task, 'duration_secs': 0.010558} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.556913] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.557195] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.557556] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.558187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.558187] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.558187] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc761901-bf29-4f8b-b1e2-96925132953c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.568047] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.568259] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.568992] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70f06f35-e210-444e-9766-3cd7082a31a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.574613] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 993.574613] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e93b9b-352c-2dbf-f209-654a4e712e9d" [ 993.574613] env[68279]: _type = "Task" [ 993.574613] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.584711] env[68279]: DEBUG nova.compute.utils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.586017] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e93b9b-352c-2dbf-f209-654a4e712e9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.586309] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 993.586472] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 993.594416] env[68279]: DEBUG nova.network.neutron [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Updated VIF entry in instance network info cache for port ec6a474d-d634-499e-9236-c78e24a8792f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.594799] env[68279]: DEBUG nova.network.neutron [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Updating instance_info_cache with network_info: [{"id": "ec6a474d-d634-499e-9236-c78e24a8792f", "address": "fa:16:3e:19:96:2d", "network": {"id": "dad7dfa5-f696-4b12-aa38-1158fec574d0", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1337288144-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e051515a1294ec080edeaf52d79c8f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6a474d-d6", "ovs_interfaceid": "ec6a474d-d634-499e-9236-c78e24a8792f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.629148] env[68279]: DEBUG nova.policy [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce3eaacf18f94d979400de2071e05ad5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cec84ec5eaf740cab9a1c56bfb9d6244', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 993.671654] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963487, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.923683] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Successfully created port: 14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.957572] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Releasing lock "refresh_cache-99024851-0add-44b9-a70a-2e242180d6a9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.958037] env[68279]: DEBUG nova.compute.manager [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 993.958260] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 993.959187] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8711219-f2db-449e-ab6f-e284c017ff22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.968649] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.969032] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9afcba1-a6fa-4cf8-aee5-cfdbdabb8ab7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.976940] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 993.976940] env[68279]: value = "task-2963488" [ 993.976940] env[68279]: _type = "Task" [ 993.976940] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.987169] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963488, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.046577] env[68279]: DEBUG nova.network.neutron [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.057007] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.085269] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e93b9b-352c-2dbf-f209-654a4e712e9d, 'name': SearchDatastore_Task, 'duration_secs': 0.012472} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.086096] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad9e334-7e9a-468f-be3d-f889198c2a74 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.090254] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.097830] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 994.097830] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297fc74-c6eb-10ed-9f6b-9819f3442083" [ 994.097830] env[68279]: _type = "Task" [ 994.097830] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.101574] env[68279]: DEBUG oslo_concurrency.lockutils [req-fbe33987-51e4-4596-9e89-b16f90e29582 req-7db5ba00-23e4-442d-8041-aeb8a35288ee service nova] Releasing lock "refresh_cache-0731fdf9-f90c-46a4-9165-f6d91767b51b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.122165] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5297fc74-c6eb-10ed-9f6b-9819f3442083, 'name': SearchDatastore_Task, 'duration_secs': 0.012101} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.125346] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.125455] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0731fdf9-f90c-46a4-9165-f6d91767b51b/0731fdf9-f90c-46a4-9165-f6d91767b51b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.127270] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5c2b1dc-e890-4f18-af49-85baf9b3e520 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.138311] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 994.138311] env[68279]: value = "task-2963489" [ 994.138311] env[68279]: _type = "Task" [ 994.138311] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.152848] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963489, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.173818] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963487, 'name': ReconfigVM_Task, 'duration_secs': 0.83167} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.176820] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Reconfigured VM instance instance-00000054 to attach disk [datastore2] e6f39528-384c-456b-8155-a6856bab3ce0/e6f39528-384c-456b-8155-a6856bab3ce0.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.177733] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8484ed16-3e00-4618-ab53-4cb377d55e75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.187196] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 994.187196] env[68279]: value = "task-2963490" [ 994.187196] env[68279]: _type = "Task" [ 994.187196] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.199246] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963490, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.271310] env[68279]: DEBUG nova.network.neutron [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Updating instance_info_cache with network_info: [{"id": "e77b7975-dc57-4094-9c2d-546c6bce9653", "address": "fa:16:3e:54:5e:e5", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77b7975-dc", "ovs_interfaceid": "e77b7975-dc57-4094-9c2d-546c6bce9653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.310587] env[68279]: DEBUG nova.network.neutron [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating 
instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.492849] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963488, 'name': PowerOffVM_Task, 'duration_secs': 0.123978} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.493929] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.494331] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.494750] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f64e595-b6a7-4469-9fcf-5f0d1d160e75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.527456] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 994.527759] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 994.528563] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Deleting the datastore file [datastore2] 99024851-0add-44b9-a70a-2e242180d6a9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.528563] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ad75293-da31-4541-bfde-2878a65195c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.538013] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for the task: (returnval){ [ 994.538013] env[68279]: value = "task-2963492" [ 994.538013] env[68279]: _type = "Task" [ 994.538013] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.550921] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.557447] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfbc1c6-4e4a-4248-9eba-ef7cee4d1964 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.566486] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017f4446-e0e6-4949-8acf-b44409735e90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.603073] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4128e256-8996-4a37-98f1-37d88257a57e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.614470] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac17bdb-cdc1-4f26-8db3-5e919d4a4328 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.631737] env[68279]: DEBUG nova.compute.provider_tree [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.649834] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963489, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.697750] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963490, 'name': Rename_Task, 'duration_secs': 0.167996} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.698037] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.698283] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24a72fc8-ac70-4e1f-bd3f-b1644b389716 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.705548] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 994.705548] env[68279]: value = "task-2963493" [ 994.705548] env[68279]: _type = "Task" [ 994.705548] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.713685] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.777682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.778042] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Instance network_info: |[{"id": "e77b7975-dc57-4094-9c2d-546c6bce9653", "address": "fa:16:3e:54:5e:e5", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77b7975-dc", "ovs_interfaceid": "e77b7975-dc57-4094-9c2d-546c6bce9653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.779100] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac 
tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:5e:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e77b7975-dc57-4094-9c2d-546c6bce9653', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.786282] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 994.786538] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.786794] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-402caf5b-7f32-4a7f-b1f5-1df8f16749c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.808235] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.808235] env[68279]: value = "task-2963494" [ 994.808235] env[68279]: _type = "Task" [ 994.808235] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.813381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.813859] env[68279]: DEBUG nova.objects.instance [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'flavor' on Instance uuid c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.818018] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963494, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.047138] env[68279]: DEBUG nova.compute.manager [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Received event network-changed-e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 995.047296] env[68279]: DEBUG nova.compute.manager [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Refreshing instance network info cache due to event network-changed-e77b7975-dc57-4094-9c2d-546c6bce9653. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 995.047517] env[68279]: DEBUG oslo_concurrency.lockutils [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] Acquiring lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.047663] env[68279]: DEBUG oslo_concurrency.lockutils [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] Acquired lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.047823] env[68279]: DEBUG nova.network.neutron [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Refreshing network info cache for port e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.052780] env[68279]: DEBUG oslo_vmware.api [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Task: {'id': task-2963492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362305} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.053069] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.054352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.054352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.054352] env[68279]: INFO nova.compute.manager [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 995.054352] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.054352] env[68279]: DEBUG nova.compute.manager [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.054352] env[68279]: DEBUG nova.network.neutron [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.071089] env[68279]: DEBUG nova.network.neutron [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.107939] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 995.136574] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.136830] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.136990] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.137192] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.137417] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.137561] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 
tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.137701] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.137902] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.138108] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.138280] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.138456] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.139375] env[68279]: DEBUG nova.scheduler.client.report [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.143211] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49af1f0-dad7-4b66-951b-4fc83d8e2995 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.158346] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c5e6fd-02c5-452d-933c-47e82e2b94a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.162777] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963489, 
'name': CopyVirtualDisk_Task, 'duration_secs': 0.601532} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.163669] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0731fdf9-f90c-46a4-9165-f6d91767b51b/0731fdf9-f90c-46a4-9165-f6d91767b51b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.163934] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.165034] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b1224a3-86a5-475e-a7ca-5f1876305979 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.183574] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 995.183574] env[68279]: value = "task-2963495" [ 995.183574] env[68279]: _type = "Task" [ 995.183574] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.193323] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.218316] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963493, 'name': PowerOnVM_Task} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.218316] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.218725] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Took 7.77 seconds to spawn the instance on the hypervisor. 
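The nova.virt.hardware entries above walk the CPU-topology selection for the m1.nano flavor: 1 vCPU with no flavor or image limits collapses to the single topology sockets=1, cores=1, threads=1. The following is a minimal, self-contained Python sketch of that enumeration, written for illustration only (it is not Nova's actual _get_possible_cpu_topologies); the 65536 caps are the defaults the log reports as "limits were sockets=65536, cores=65536, threads=65536".

from typing import List, NamedTuple

class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, max_sockets: int,
                        max_cores: int, max_threads: int) -> List[Topology]:
    """Enumerate (sockets, cores, threads) combinations whose product equals vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(Topology(sockets, cores, threads))
    return found

# For the 1-vCPU m1.nano flavor this yields exactly one topology, matching
# "Got 1 possible topologies" and "[VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1, 65536, 65536, 65536))
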
[ 995.218952] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.219915] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f82bf4f-b113-4b82-9964-8f64f7d025e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.321531] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963494, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.322646] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dab84c9-6a1f-4446-939e-baf5ff1f574b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.348597] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.348964] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40adc112-23f3-4405-8da1-85df860af95b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.355769] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 995.355769] env[68279]: value = "task-2963496" [ 995.355769] env[68279]: _type = "Task" [ 995.355769] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.364632] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963496, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.576087] env[68279]: DEBUG nova.network.neutron [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.602329] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Successfully updated port: 14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.646966] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.647390] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 995.650175] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.411s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.650417] env[68279]: DEBUG nova.objects.instance [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lazy-loading 'resources' on Instance uuid 67466e30-5944-490c-a89b-2d32c59525be {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.694662] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076704} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.694962] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.695746] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3466c258-c8e3-44cd-ab40-2b5182b2e9bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.720399] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 0731fdf9-f90c-46a4-9165-f6d91767b51b/0731fdf9-f90c-46a4-9165-f6d91767b51b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.723103] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55f2f9f4-24e2-46ea-958e-3442659d0e8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.746829] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Took 28.06 seconds to build instance. [ 995.753280] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 995.753280] env[68279]: value = "task-2963497" [ 995.753280] env[68279]: _type = "Task" [ 995.753280] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.763459] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.807635] env[68279]: DEBUG nova.network.neutron [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Updated VIF entry in instance network info cache for port e77b7975-dc57-4094-9c2d-546c6bce9653. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.808043] env[68279]: DEBUG nova.network.neutron [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Updating instance_info_cache with network_info: [{"id": "e77b7975-dc57-4094-9c2d-546c6bce9653", "address": "fa:16:3e:54:5e:e5", "network": {"id": "2607cceb-738d-4d14-a768-ffe04914828b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-36447306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dbad607de614a809c51668c2ac0d012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77b7975-dc", "ovs_interfaceid": "e77b7975-dc57-4094-9c2d-546c6bce9653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.819021] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963494, 'name': CreateVM_Task, 'duration_secs': 0.579853} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.819818] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.820559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.820771] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.821116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 995.821637] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef713c00-5a9e-4a62-b3ba-0ce76822746d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.827165] env[68279]: DEBUG 
oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 995.827165] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525381bb-491b-d864-d932-67d023081c6c" [ 995.827165] env[68279]: _type = "Task" [ 995.827165] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.836230] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525381bb-491b-d864-d932-67d023081c6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.868669] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963496, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.072552] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.072883] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.079287] env[68279]: INFO nova.compute.manager [-] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Took 1.02 seconds to deallocate network for instance. 
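The recurring "Waiting for the task ... progress is N% ... completed successfully" entries come from the oslo.vmware API session repeatedly re-reading a vCenter task's state until it reaches a terminal state. Below is a minimal, self-contained sketch of that polling pattern; it is an illustration, not oslo.vmware's actual wait_for_task, and the poll callable plus its dictionaries are invented stand-ins for reading the real taskInfo object.

import time
from typing import Callable, Dict

def wait_for_task(poll: Callable[[], Dict], interval: float = 0.5) -> Dict:
    """Poll `poll()` (a stand-in for reading VMware taskInfo) until the task finishes."""
    while True:
        info = poll()
        if info["state"] == "success":
            print(f"Task {info['key']} completed successfully")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['key']} failed: {info.get('error')}")
        print(f"Task {info['key']} ({info['name']}) progress is {info.get('progress', 0)}%")
        time.sleep(interval)

# Example with a fake task that finishes on the third poll, mimicking the
# PowerOffVM_Task entries above (task id reused from the log for illustration).
_states = iter([
    {"key": "task-2963488", "name": "PowerOffVM_Task", "state": "running", "progress": 0},
    {"key": "task-2963488", "name": "PowerOffVM_Task", "state": "running", "progress": 50},
    {"key": "task-2963488", "name": "PowerOffVM_Task", "state": "success"},
])
wait_for_task(lambda: next(_states), interval=0.01)
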
[ 996.104754] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.104945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.105212] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.156716] env[68279]: DEBUG nova.compute.utils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.158315] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 996.158451] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.211122] env[68279]: DEBUG nova.policy [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce3eaacf18f94d979400de2071e05ad5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cec84ec5eaf740cab9a1c56bfb9d6244', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.250604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.577s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.265887] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 
tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.310565] env[68279]: DEBUG oslo_concurrency.lockutils [req-25336b26-01a5-4fb7-9a47-b9fb5c0dfeb6 req-7c66b46c-46f1-48e1-b808-bc62575d2196 service nova] Releasing lock "refresh_cache-1bd92b53-46c0-4b63-be20-857cffed87cd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.341059] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525381bb-491b-d864-d932-67d023081c6c, 'name': SearchDatastore_Task, 'duration_secs': 0.012741} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.341368] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.341593] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.341823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.341962] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.342159] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.342426] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd1fa78d-1697-4c05-8704-077c8afe4df4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.353475] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
996.353641] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.354421] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e604844b-cb3a-499a-b58d-ea1d264e935d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.363861] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 996.363861] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52570c52-6d5c-af9a-fa38-215791b14bd8" [ 996.363861] env[68279]: _type = "Task" [ 996.363861] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.370278] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963496, 'name': PowerOffVM_Task, 'duration_secs': 0.551861} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.373138] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.378512] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfiguring VM instance instance-00000042 to detach disk 2002 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.382433] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67eee190-d076-4e3d-ab0e-18da7a8a0889 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.395694] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52570c52-6d5c-af9a-fa38-215791b14bd8, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.399389] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d47e0b6-0c99-4db3-94b9-dcbde2828e5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.406139] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 996.406139] env[68279]: value = "task-2963498" [ 996.406139] env[68279]: _type = "Task" [ 996.406139] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.407751] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 996.407751] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529222a3-91b4-6395-5e69-60b40dad6ba4" [ 996.407751] env[68279]: _type = "Task" [ 996.407751] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.421983] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963498, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.425614] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529222a3-91b4-6395-5e69-60b40dad6ba4, 'name': SearchDatastore_Task, 'duration_secs': 0.0109} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.425964] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.426064] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1bd92b53-46c0-4b63-be20-857cffed87cd/1bd92b53-46c0-4b63-be20-857cffed87cd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.426664] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67a85a73-04f7-465c-859b-30ec80555e9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.436103] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 996.436103] env[68279]: value = "task-2963499" [ 996.436103] env[68279]: _type = "Task" [ 996.436103] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.450025] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963499, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.476325] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Successfully created port: d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.565617] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74a7fd5-8aaf-4689-a50a-b0c83f0bd966 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.572860] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf029688-8890-4677-b137-caf17f21f611 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.576566] env[68279]: INFO nova.compute.manager [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Detaching volume 064e058e-cfe0-4945-a75c-2c0d2b58a092 [ 996.608593] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.614169] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62903e3-0d4f-4abf-93be-acb3a7ad6aef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.624179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d37f18-358f-45b4-9c5f-392169b50137 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.641031] env[68279]: DEBUG nova.compute.provider_tree [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.653118] env[68279]: INFO nova.virt.block_device [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Attempting to driver detach volume 064e058e-cfe0-4945-a75c-2c0d2b58a092 from mountpoint /dev/sdb [ 996.653364] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 996.653560] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594650', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'name': 'volume-064e058e-cfe0-4945-a75c-2c0d2b58a092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'eccc5882-2c8b-456d-bbd2-d9ed22777a77', 'attached_at': '', 'detached_at': '', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'serial': '064e058e-cfe0-4945-a75c-2c0d2b58a092'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 996.654456] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200f6500-ac63-46ce-9457-0c19eb48594a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.657834] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 996.661117] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 996.684918] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3734039-4521-44b5-8e8e-d669094c5ba7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.693267] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d7edf6-1cc8-4db8-8be7-8907e434066b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.717161] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263911b3-8022-4ba7-b720-9f2898556f44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.734617] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] The volume has not been displaced from its original location: [datastore2] volume-064e058e-cfe0-4945-a75c-2c0d2b58a092/volume-064e058e-cfe0-4945-a75c-2c0d2b58a092.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 996.739981] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfiguring VM instance instance-00000017 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.740387] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f0d0343-2699-4f40-b701-fc928fa18247 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.753873] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 996.766655] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 996.766655] env[68279]: value = "task-2963500" [ 996.766655] env[68279]: _type = "Task" [ 996.766655] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.770653] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963497, 'name': ReconfigVM_Task, 'duration_secs': 0.774366} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.776420] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 0731fdf9-f90c-46a4-9165-f6d91767b51b/0731fdf9-f90c-46a4-9165-f6d91767b51b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.777126] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aae2933c-011e-4102-91c8-e3157856b1bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.786479] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.788092] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 996.788092] env[68279]: value = "task-2963501" [ 996.788092] env[68279]: _type = "Task" [ 996.788092] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.797942] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963501, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.860602] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Updating instance_info_cache with network_info: [{"id": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "address": "fa:16:3e:9b:09:88", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14ed552c-b2", "ovs_interfaceid": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.918506] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963498, 'name': ReconfigVM_Task, 'duration_secs': 0.348367} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.918794] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfigured VM instance instance-00000042 to detach disk 2002 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 996.918964] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.919250] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d5cf907-1f54-42d8-b368-682134a7b904 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.926422] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 996.926422] env[68279]: value = "task-2963502" [ 996.926422] env[68279]: _type = "Task" [ 996.926422] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.950311] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963502, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.953349] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963499, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.087955] env[68279]: DEBUG nova.compute.manager [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Received event network-vif-plugged-14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.088214] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Acquiring lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.088410] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.088582] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.088748] env[68279]: DEBUG nova.compute.manager [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] No waiting events found dispatching network-vif-plugged-14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 997.088910] env[68279]: WARNING nova.compute.manager [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Received unexpected event network-vif-plugged-14ed552c-b208-40b2-969d-fea6c41a4a0d for instance with vm_state building and task_state spawning. [ 997.089079] env[68279]: DEBUG nova.compute.manager [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Received event network-changed-14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.089234] env[68279]: DEBUG nova.compute.manager [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Refreshing instance network info cache due to event network-changed-14ed552c-b208-40b2-969d-fea6c41a4a0d. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 997.089422] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Acquiring lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.145820] env[68279]: DEBUG nova.scheduler.client.report [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.276770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.280600] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963500, 'name': ReconfigVM_Task, 'duration_secs': 0.349103} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.280932] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Reconfigured VM instance instance-00000017 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 997.285579] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6732d1ca-9ad1-454e-804e-1fd7a6483383 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.305541] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963501, 'name': Rename_Task, 'duration_secs': 0.260001} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.306742] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.307055] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 997.307055] env[68279]: value = "task-2963503" [ 997.307055] env[68279]: _type = "Task" [ 997.307055] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.307347] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6be1a53-7add-47bb-b7ee-e41ee88c6fd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.316260] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963503, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.317417] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 997.317417] env[68279]: value = "task-2963504" [ 997.317417] env[68279]: _type = "Task" [ 997.317417] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.325166] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963504, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.363145] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.363362] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Instance network_info: |[{"id": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "address": "fa:16:3e:9b:09:88", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14ed552c-b2", "ovs_interfaceid": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 997.363640] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Acquired lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.363824] env[68279]: DEBUG nova.network.neutron [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Refreshing network info cache for port 14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.365117] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:09:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee617cec-01ea-4a11-ac04-ef9767f4c86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14ed552c-b208-40b2-969d-fea6c41a4a0d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.372596] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] Creating folder: Project (cec84ec5eaf740cab9a1c56bfb9d6244). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 997.375890] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a72e3f89-85ff-4512-8614-ed8b1409f79f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.387555] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created folder: Project (cec84ec5eaf740cab9a1c56bfb9d6244) in parent group-v594445. [ 997.387754] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Creating folder: Instances. Parent ref: group-v594687. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 997.388483] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92ec1792-09d4-4067-9144-4e209fd20ec4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.397659] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created folder: Instances in parent group-v594687. [ 997.397966] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.398099] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.398320] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4371b52-fa86-42de-9efa-cd05000afc9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.433527] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.433527] env[68279]: value = "task-2963507" [ 997.433527] env[68279]: _type = "Task" [ 997.433527] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.440731] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963502, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.447041] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963507, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.452044] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523765} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.454354] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 1bd92b53-46c0-4b63-be20-857cffed87cd/1bd92b53-46c0-4b63-be20-857cffed87cd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 997.454640] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 997.454915] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e16b27e0-6308-493a-ade6-f8a18d1ceb0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.462662] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 997.462662] env[68279]: value = "task-2963508" [ 997.462662] env[68279]: _type = "Task" [ 997.462662] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.471932] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.606281] env[68279]: DEBUG nova.network.neutron [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Updated VIF entry in instance network info cache for port 14ed552c-b208-40b2-969d-fea6c41a4a0d. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.606644] env[68279]: DEBUG nova.network.neutron [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Updating instance_info_cache with network_info: [{"id": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "address": "fa:16:3e:9b:09:88", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14ed552c-b2", "ovs_interfaceid": "14ed552c-b208-40b2-969d-fea6c41a4a0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.651103] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.654449] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.055s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.655728] env[68279]: INFO nova.compute.claims [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 997.671321] env[68279]: INFO nova.scheduler.client.report [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted allocations for instance 67466e30-5944-490c-a89b-2d32c59525be [ 997.688359] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 997.718362] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.718654] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.718838] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.719074] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.719250] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.719795] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.720070] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.720265] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.720451] env[68279]: DEBUG nova.virt.hardware [None 
req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.720647] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.720837] env[68279]: DEBUG nova.virt.hardware [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.722651] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2604e49-86ff-4689-9d8e-f69fe219301b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.733488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef4ba65-4dce-4ee2-a638-120227a27229 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.819682] env[68279]: DEBUG oslo_vmware.api [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963503, 'name': ReconfigVM_Task, 'duration_secs': 0.154736} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.823397] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594650', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'name': 'volume-064e058e-cfe0-4945-a75c-2c0d2b58a092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'eccc5882-2c8b-456d-bbd2-d9ed22777a77', 'attached_at': '', 'detached_at': '', 'volume_id': '064e058e-cfe0-4945-a75c-2c0d2b58a092', 'serial': '064e058e-cfe0-4945-a75c-2c0d2b58a092'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 997.831840] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963504, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.938729] env[68279]: DEBUG oslo_vmware.api [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963502, 'name': PowerOnVM_Task, 'duration_secs': 0.673503} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.942138] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.942393] env[68279]: DEBUG nova.compute.manager [None req-1ee422d8-b629-4f31-ac08-e494f9a15537 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.943208] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b082c41-70d0-46fd-9639-d66a40bc43e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.952530] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963507, 'name': CreateVM_Task, 'duration_secs': 0.496045} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.954791] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.959091] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.959399] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.959816] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 997.960485] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d121aae-039a-4df3-9f49-ea46402054bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.970032] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 997.970032] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ea9b64-ca76-6bf9-e4f9-49d1089ab25b" [ 997.970032] env[68279]: _type = "Task" [ 997.970032] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.980094] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074729} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.981151] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 997.982455] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf7cd80-1331-47a2-968f-1783dfbfa9e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.992680] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ea9b64-ca76-6bf9-e4f9-49d1089ab25b, 'name': SearchDatastore_Task, 'duration_secs': 0.017016} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.993644] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.993975] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.994333] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.994530] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.995084] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.009298] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6982aa5-8e42-4a2a-a553-1a5c2b010fc0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.020884] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 1bd92b53-46c0-4b63-be20-857cffed87cd/1bd92b53-46c0-4b63-be20-857cffed87cd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.021936] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cedbb1fa-3d6b-46ea-b5c8-01f1305e0051 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.042518] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 998.042518] env[68279]: value = "task-2963509" [ 998.042518] env[68279]: _type = "Task" [ 998.042518] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.043797] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.043981] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.049552] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66e580a2-4b3b-4ede-96a3-ad4a0db061d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.055879] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 998.055879] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525f90f3-1623-ea17-316b-2e0ef072b047" [ 998.055879] env[68279]: _type = "Task" [ 998.055879] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.059668] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963509, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.071832] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525f90f3-1623-ea17-316b-2e0ef072b047, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.071832] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62fe74e-bf19-458a-b989-0d2c0038ec6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.073931] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 998.073931] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52614032-fe0c-8298-2bc8-8373db1cd688" [ 998.073931] env[68279]: _type = "Task" [ 998.073931] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.074725] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Successfully updated port: d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.092841] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52614032-fe0c-8298-2bc8-8373db1cd688, 'name': SearchDatastore_Task, 'duration_secs': 0.011023} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.093116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.093368] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4dd80f75-13d0-43d7-8042-b175dff50250/4dd80f75-13d0-43d7-8042-b175dff50250.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.093622] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb2ff5f0-4089-4582-acf0-d13e8282ade4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.102887] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 998.102887] env[68279]: value = "task-2963510" [ 998.102887] env[68279]: _type = "Task" [ 998.102887] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.112720] env[68279]: DEBUG oslo_concurrency.lockutils [req-58507749-de77-4ef3-a063-0a915e3cf8d9 req-0e35a075-a870-4f88-84ac-ca8d81cde51c service nova] Releasing lock "refresh_cache-4dd80f75-13d0-43d7-8042-b175dff50250" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.113038] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.180137] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ad0abff-309e-4021-b5fc-06a7517bca6f tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "67466e30-5944-490c-a89b-2d32c59525be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.347s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.329865] env[68279]: DEBUG oslo_vmware.api [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963504, 'name': PowerOnVM_Task, 'duration_secs': 0.691203} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.330176] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.330414] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Took 8.31 seconds to spawn the instance on the hypervisor. [ 998.330638] env[68279]: DEBUG nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.331565] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c01f0b-3deb-4250-b63c-246840e52e71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.376255] env[68279]: DEBUG nova.objects.instance [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'flavor' on Instance uuid eccc5882-2c8b-456d-bbd2-d9ed22777a77 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.553901] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963509, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.578487] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.578487] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.578487] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.613704] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482699} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.613964] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4dd80f75-13d0-43d7-8042-b175dff50250/4dd80f75-13d0-43d7-8042-b175dff50250.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.614202] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.614452] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0333c141-565d-4f59-ac5f-ef4d400a0a62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.621327] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 998.621327] env[68279]: value = "task-2963511" [ 998.621327] env[68279]: _type = "Task" [ 998.621327] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.629911] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963511, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.850958] env[68279]: INFO nova.compute.manager [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Took 31.11 seconds to build instance. [ 999.016159] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0517b8a3-b2bb-4e83-8eb3-d087169e1f50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.024459] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da4de75-0b08-470a-b6b3-af433f7b1db5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.064244] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee039136-cd7c-4b53-a52d-3223c94424a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.074771] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963509, 'name': ReconfigVM_Task, 'duration_secs': 0.530132} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.075953] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d18f03-60fc-469b-82fa-877d12755d60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.080894] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 1bd92b53-46c0-4b63-be20-857cffed87cd/1bd92b53-46c0-4b63-be20-857cffed87cd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.083524] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4c9d060-034e-4730-a370-923623efbb25 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.098403] env[68279]: DEBUG nova.compute.provider_tree [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.101628] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 999.101628] env[68279]: value = "task-2963512" [ 999.101628] env[68279]: _type = "Task" [ 999.101628] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.111228] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963512, 'name': Rename_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.130782] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210849} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.131333] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.132189] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead54332-cc73-482d-bb4a-135a1e56192c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.136777] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.158405] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 4dd80f75-13d0-43d7-8042-b175dff50250/4dd80f75-13d0-43d7-8042-b175dff50250.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.159848] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af343bc-ef8b-4a0e-969d-85807b091c56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.176138] env[68279]: DEBUG nova.compute.manager [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Received event network-vif-plugged-d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.176138] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Acquiring lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.176322] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.176476] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.178041] env[68279]: DEBUG nova.compute.manager 
[req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] No waiting events found dispatching network-vif-plugged-d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.178041] env[68279]: WARNING nova.compute.manager [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Received unexpected event network-vif-plugged-d80b24ec-9d0e-4fd6-8503-b7081455b339 for instance with vm_state building and task_state spawning. [ 999.178041] env[68279]: DEBUG nova.compute.manager [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Received event network-changed-d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.178041] env[68279]: DEBUG nova.compute.manager [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Refreshing instance network info cache due to event network-changed-d80b24ec-9d0e-4fd6-8503-b7081455b339. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 999.178041] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Acquiring lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.184218] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 999.184218] env[68279]: value = "task-2963513" [ 999.184218] env[68279]: _type = "Task" [ 999.184218] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.193211] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963513, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.338296] env[68279]: DEBUG nova.network.neutron [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Updating instance_info_cache with network_info: [{"id": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "address": "fa:16:3e:a0:93:e1", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd80b24ec-9d", "ovs_interfaceid": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.355832] env[68279]: DEBUG oslo_concurrency.lockutils [None req-aae64fe4-09bc-4b9c-997a-0a7c68bb6f2f tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.625s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.387339] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b93accf-d471-4fb9-be96-db33d9a40acb tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.314s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.606020] env[68279]: DEBUG nova.scheduler.client.report [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.619630] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': 
task-2963512, 'name': Rename_Task, 'duration_secs': 0.257856} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.620935] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.621387] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0425004-1ac8-48ce-8167-17d53b45b8cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.628201] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 999.628201] env[68279]: value = "task-2963514" [ 999.628201] env[68279]: _type = "Task" [ 999.628201] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.637685] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.696611] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963513, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.840921] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.841266] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Instance network_info: |[{"id": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "address": "fa:16:3e:a0:93:e1", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd80b24ec-9d", "ovs_interfaceid": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.841867] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Acquired lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.842018] env[68279]: DEBUG nova.network.neutron [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Refreshing network info cache for port d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.843204] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:93:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee617cec-01ea-4a11-ac04-ef9767f4c86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd80b24ec-9d0e-4fd6-8503-b7081455b339', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.852301] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.852301] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.852395] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-066b4c1e-7c87-47d8-91e3-a180f23472e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.874153] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.874153] env[68279]: value = "task-2963515" [ 999.874153] env[68279]: _type = "Task" [ 999.874153] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.883462] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963515, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.108984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.109583] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1000.112937] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 28.004s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.113131] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.113285] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1000.113565] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.837s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.114989] env[68279]: INFO nova.compute.claims [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.118244] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54802f0-4fd4-4409-8c58-1f9d281af35c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.126501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa402e8-e0d2-49a8-8819-1916d2000fca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.146787] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff81636d-7168-4ed3-a070-ceb58d7b6aec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.149392] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963514, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.154951] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36add072-b3e9-492b-a40a-8d4005ddd850 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.186721] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178687MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1000.186877] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.195186] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963513, 'name': ReconfigVM_Task, 'duration_secs': 0.760101} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.195481] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 4dd80f75-13d0-43d7-8042-b175dff50250/4dd80f75-13d0-43d7-8042-b175dff50250.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.196107] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2961cdd2-8600-4214-9c15-6ff5086b68a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.203287] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1000.203287] env[68279]: value = "task-2963516" [ 1000.203287] env[68279]: _type = "Task" [ 1000.203287] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.210934] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963516, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.326239] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.326582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.326869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.327154] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.327493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.330842] env[68279]: INFO nova.compute.manager [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Terminating instance [ 1000.384069] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963515, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.388997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.389237] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.389468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.389653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.389827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.391958] env[68279]: INFO nova.compute.manager [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Terminating instance [ 1000.566073] env[68279]: DEBUG nova.network.neutron [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Updated VIF entry in instance network info cache for port d80b24ec-9d0e-4fd6-8503-b7081455b339. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.566496] env[68279]: DEBUG nova.network.neutron [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Updating instance_info_cache with network_info: [{"id": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "address": "fa:16:3e:a0:93:e1", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd80b24ec-9d", "ovs_interfaceid": "d80b24ec-9d0e-4fd6-8503-b7081455b339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.622797] env[68279]: DEBUG nova.compute.utils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1000.624222] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1000.624575] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.641512] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963514, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.663110] env[68279]: DEBUG nova.policy [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e99b9f502524423bb308e1b5ec12b71', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f54b17f9e714a32ae5c97117a87745c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.715035] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963516, 'name': Rename_Task, 'duration_secs': 0.302709} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.715271] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.715526] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb2f3db5-b224-4b1c-aba3-5642c2efb9ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.723939] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1000.723939] env[68279]: value = "task-2963517" [ 1000.723939] env[68279]: _type = "Task" [ 1000.723939] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.733020] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963517, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.839151] env[68279]: DEBUG nova.compute.manager [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.839151] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.839810] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554f3046-75c0-423a-bbac-3b4ddbd3bb05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.848321] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.848658] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5051bc66-7983-440f-8db5-b72796abdbe1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.855405] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1000.855405] env[68279]: value = "task-2963518" [ 1000.855405] env[68279]: _type = "Task" [ 1000.855405] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.864730] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963518, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.886175] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963515, 'name': CreateVM_Task, 'duration_secs': 0.855764} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.886448] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.887616] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.887616] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.888056] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1000.888382] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7ba7667-e62f-49cb-8b45-ceeb935d8eda {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.895516] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1000.895516] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a18fee-7443-dcc5-7cbe-6e2007e5ffe7" [ 1000.895516] env[68279]: _type = "Task" [ 1000.895516] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.895800] env[68279]: DEBUG nova.compute.manager [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.895885] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.897092] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c133f53c-246a-4ba0-a0cd-1d8be87b9c20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.914527] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a18fee-7443-dcc5-7cbe-6e2007e5ffe7, 'name': SearchDatastore_Task, 'duration_secs': 0.011704} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.917101] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.917274] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.917536] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.917694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.917879] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.918230] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powering off the VM {{(pid=68279) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.918480] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f322f300-260f-4dc2-a4ac-e67872c0b26c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.920486] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-456e495c-8a8a-4120-80e8-bcdbb5d8cd0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.929577] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1000.929577] env[68279]: value = "task-2963519" [ 1000.929577] env[68279]: _type = "Task" [ 1000.929577] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.934374] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.934742] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.935753] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a481749b-cfd7-4012-b1d2-a4064c5699d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.944683] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963519, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.946788] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1000.946788] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521a5c9e-ffbb-b097-d91b-1852d24b82cf" [ 1000.946788] env[68279]: _type = "Task" [ 1000.946788] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.956869] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521a5c9e-ffbb-b097-d91b-1852d24b82cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.996712] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Successfully created port: a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.069044] env[68279]: DEBUG oslo_concurrency.lockutils [req-fcb3a689-c1d1-4178-b9b1-480236004d94 req-68353387-a674-4fb4-80be-9f32df536074 service nova] Releasing lock "refresh_cache-4090e245-b026-4d3a-b7f0-e61543701d8f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.131073] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1001.146167] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963514, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.235945] env[68279]: DEBUG nova.compute.manager [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.236175] env[68279]: DEBUG nova.compute.manager [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing instance network info cache due to event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1001.236392] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.236536] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.236697] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.248359] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963517, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.288131] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.288360] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.371794] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963518, 'name': PowerOffVM_Task, 'duration_secs': 0.185526} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.374499] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.374717] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.375150] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35f387ef-2051-45c9-8abd-fb34ffb91157 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.444734] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963519, 'name': PowerOffVM_Task, 'duration_secs': 0.228839} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.445067] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.445280] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.445427] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7c9f935-dd92-4e58-837d-606f71e21db3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.448931] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.449248] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.449512] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleting the datastore file [datastore2] 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.455645] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fac0ddae-743f-4f3a-a220-fd55b3ace5b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.465948] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521a5c9e-ffbb-b097-d91b-1852d24b82cf, 'name': SearchDatastore_Task, 'duration_secs': 0.028202} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.468751] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1001.468751] env[68279]: value = "task-2963522" [ 1001.468751] env[68279]: _type = "Task" [ 1001.468751] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.468751] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eea52805-2fa1-432b-ad03-972ab7e432cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.483420] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1001.483420] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2e563-1f36-1cba-7b1b-81a90489f7f0" [ 1001.483420] env[68279]: _type = "Task" [ 1001.483420] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.489029] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.501567] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2e563-1f36-1cba-7b1b-81a90489f7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.525643] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.526723] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.526723] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore1] eccc5882-2c8b-456d-bbd2-d9ed22777a77 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.526899] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa698e5f-a3ef-4cfb-bacc-d8f9fd6d9e49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.534491] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1001.534491] env[68279]: value = "task-2963523" [ 1001.534491] env[68279]: _type = "Task" [ 1001.534491] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.547405] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.577020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3392bb13-3d37-4018-bd33-4737a4a9d02a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.583857] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80e09cf-ff20-47bb-8598-157ae79ddf27 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.616762] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df36dee5-b46c-4cfb-9643-e86dc2edb425 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.624613] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc6a772-6f68-4592-a134-6e48fa0d6b07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.643366] env[68279]: DEBUG nova.compute.provider_tree [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.652742] env[68279]: DEBUG oslo_vmware.api [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963514, 'name': PowerOnVM_Task, 'duration_secs': 1.601187} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.653522] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.653728] env[68279]: INFO nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 1001.653909] env[68279]: DEBUG nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.654677] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe6b42b-1bc9-43a4-b5a2-9628a663b057 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.735787] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963517, 'name': PowerOnVM_Task, 'duration_secs': 0.633064} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.736532] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.736760] env[68279]: INFO nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Took 6.63 seconds to spawn the instance on the hypervisor. [ 1001.736993] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.737846] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819c0a4e-4cc9-4127-9d9c-1321664e1733 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.791254] env[68279]: DEBUG nova.compute.utils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1001.960509] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updated VIF entry in instance network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.960915] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.982248] env[68279]: DEBUG oslo_vmware.api [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169527} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.982562] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.982750] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.982927] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.983115] env[68279]: INFO nova.compute.manager [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1001.983411] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.983859] env[68279]: DEBUG nova.compute.manager [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.983974] env[68279]: DEBUG nova.network.neutron [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.999135] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2e563-1f36-1cba-7b1b-81a90489f7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.022797} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.999411] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.999679] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4090e245-b026-4d3a-b7f0-e61543701d8f/4090e245-b026-4d3a-b7f0-e61543701d8f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.999942] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00aaa13c-790d-4422-8172-9c3fb5da818e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.007099] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1002.007099] env[68279]: value = "task-2963524" [ 1002.007099] env[68279]: _type = "Task" [ 1002.007099] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.016614] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963524, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.046153] env[68279]: DEBUG oslo_vmware.api [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142183} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.046410] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.046599] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.046775] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.046946] env[68279]: INFO nova.compute.manager [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1002.047204] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1002.047411] env[68279]: DEBUG nova.compute.manager [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1002.047513] env[68279]: DEBUG nova.network.neutron [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.146715] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1002.150186] env[68279]: DEBUG nova.scheduler.client.report [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.180261] env[68279]: INFO nova.compute.manager [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Took 32.53 seconds to build instance. [ 1002.193686] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1002.193945] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.194120] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.194305] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.194453] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.194603] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1002.194810] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1002.194967] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1002.195154] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1002.195334] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1002.195509] env[68279]: DEBUG nova.virt.hardware [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1002.196417] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8347d6-614b-409e-8efb-b60919013bf8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.207705] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7299ae73-455f-4db7-8da6-310a2aef6893 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.258590] env[68279]: INFO nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Took 32.48 seconds to build instance. 
[ 1002.293588] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.464070] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.464396] env[68279]: DEBUG nova.compute.manager [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.464396] env[68279]: DEBUG nova.compute.manager [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing instance network info cache due to event network-changed-971e9f68-2eb2-418a-92ac-ab9f6e6b6859. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1002.465232] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Acquiring lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.465232] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Acquired lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.465232] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Refreshing network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.517985] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963524, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.614299] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Successfully updated port: a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.660598] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.661150] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1002.664091] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.619s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.665570] env[68279]: INFO nova.compute.claims [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.687060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e31eaa9b-ae7c-4d18-b9f2-f3387b9810ac tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.039s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.738295] env[68279]: DEBUG nova.compute.manager [req-6f8e9ed3-d67f-4f9b-ba8f-772e039893f6 req-a1455b90-67ec-4342-88eb-dd6110111c74 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Received event network-vif-deleted-15317896-8bd1-46c4-8fc9-8bf0966392a4 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.738295] env[68279]: INFO nova.compute.manager [req-6f8e9ed3-d67f-4f9b-ba8f-772e039893f6 req-a1455b90-67ec-4342-88eb-dd6110111c74 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Neutron deleted interface 15317896-8bd1-46c4-8fc9-8bf0966392a4; detaching it from the instance and deleting it from the info cache [ 1002.738295] env[68279]: DEBUG nova.network.neutron [req-6f8e9ed3-d67f-4f9b-ba8f-772e039893f6 req-a1455b90-67ec-4342-88eb-dd6110111c74 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.742466] env[68279]: DEBUG 
nova.network.neutron [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.763468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.992s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.023695] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950214} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.027190] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 4090e245-b026-4d3a-b7f0-e61543701d8f/4090e245-b026-4d3a-b7f0-e61543701d8f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.027512] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.027797] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf23ada2-9986-4313-87b3-1c0e5b66845c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.037403] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1003.037403] env[68279]: value = "task-2963525" [ 1003.037403] env[68279]: _type = "Task" [ 1003.037403] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.048353] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963525, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.084470] env[68279]: DEBUG nova.network.neutron [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.119402] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.119402] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquired lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.119402] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.177986] env[68279]: DEBUG nova.compute.utils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.182163] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1003.182163] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.246327] env[68279]: INFO nova.compute.manager [-] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Took 1.26 seconds to deallocate network for instance. 
[ 1003.246953] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25fcd48e-69b9-412a-a173-a2b801391e98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.252137] env[68279]: DEBUG nova.policy [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd67d0e35641a4494a5087e0f3abdc767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd1384256d224e80bf6f25b9fd054376', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.260632] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1693a8b-4733-47c5-8c20-d4ae46921ae3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.272613] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updated VIF entry in instance network info cache for port 971e9f68-2eb2-418a-92ac-ab9f6e6b6859. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.273314] env[68279]: DEBUG nova.network.neutron [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [{"id": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "address": "fa:16:3e:69:95:46", "network": {"id": "a8dd221c-10da-47e4-a1d5-1f232d785377", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-651074156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "182c7f7affa443dba0ce3affd30eed42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971e9f68-2e", "ovs_interfaceid": "971e9f68-2eb2-418a-92ac-ab9f6e6b6859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.307696] env[68279]: DEBUG nova.compute.manager [req-6f8e9ed3-d67f-4f9b-ba8f-772e039893f6 req-a1455b90-67ec-4342-88eb-dd6110111c74 service nova] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Detach interface failed, port_id=15317896-8bd1-46c4-8fc9-8bf0966392a4, reason: Instance 
eccc5882-2c8b-456d-bbd2-d9ed22777a77 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1003.400183] env[68279]: DEBUG nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Received event network-vif-deleted-d95a0993-73f2-4ac5-a5bb-298e3b990150 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.400491] env[68279]: DEBUG nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Received event network-vif-plugged-a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.400709] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Acquiring lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.400914] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.401094] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.401264] env[68279]: DEBUG nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] No waiting events found dispatching network-vif-plugged-a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.401433] env[68279]: WARNING nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Received unexpected event network-vif-plugged-a36ff15f-dc24-4fe7-aaf1-66caad63a54f for instance with vm_state building and task_state spawning. [ 1003.401705] env[68279]: DEBUG nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Received event network-changed-a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.401763] env[68279]: DEBUG nova.compute.manager [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Refreshing instance network info cache due to event network-changed-a36ff15f-dc24-4fe7-aaf1-66caad63a54f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1003.401915] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Acquiring lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.404968] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.405192] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.405396] env[68279]: INFO nova.compute.manager [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Attaching volume ad42c3b3-ce0d-4cb3-b4a4-3be32274b555 to /dev/sdb [ 1003.442017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46c7be5-1f36-4c53-ad84-6fb23b35ba01 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.450417] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709c1902-58d5-4888-a970-d5a00d4a014b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.465292] env[68279]: DEBUG nova.virt.block_device [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating existing volume attachment record: 953df854-45c6-4c8f-a47f-7ccc3f4cf7fb {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1003.547455] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074228} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.547792] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.548384] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d810977d-b04f-486f-9eae-3f166282ee1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.573030] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 4090e245-b026-4d3a-b7f0-e61543701d8f/4090e245-b026-4d3a-b7f0-e61543701d8f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.573030] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-621d7136-c74b-4f4c-9143-f4ad8e0443a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.588273] env[68279]: INFO nova.compute.manager [-] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Took 1.54 seconds to deallocate network for instance. [ 1003.595180] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1003.595180] env[68279]: value = "task-2963526" [ 1003.595180] env[68279]: _type = "Task" [ 1003.595180] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.604826] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963526, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.656118] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.682221] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Successfully created port: 5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.682477] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1003.759842] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.776263] env[68279]: DEBUG oslo_concurrency.lockutils [req-c5b30da1-57a4-4f81-8108-a0af3aa74c50 req-072aa27e-b1f2-4c5f-8b5f-090b9ecb4f38 service nova] Releasing lock "refresh_cache-c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.822415] env[68279]: DEBUG nova.network.neutron [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updating instance_info_cache with network_info: [{"id": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "address": "fa:16:3e:b4:5c:2f", "network": {"id": "7aa6c0b4-01c0-4b5e-b7c1-19824b0dde00", "bridge": "br-int", "label": "tempest-ServersTestJSON-597296579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f54b17f9e714a32ae5c97117a87745c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa36ff15f-dc", "ovs_interfaceid": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.101228] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.114200] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.158840] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b7e4c3-2667-4064-98e7-f27689a2c1f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.170869] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6ab914-c4ec-4a8c-933b-816598b0c32c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.214030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af09da56-4e1d-4e02-a9f2-ca936cab9e40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.223039] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c719596-8716-4f4b-9706-43cd37bcca07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.238599] env[68279]: DEBUG nova.compute.provider_tree [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.325152] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Releasing lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.325152] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Instance network_info: |[{"id": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "address": "fa:16:3e:b4:5c:2f", "network": {"id": "7aa6c0b4-01c0-4b5e-b7c1-19824b0dde00", "bridge": "br-int", "label": "tempest-ServersTestJSON-597296579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f54b17f9e714a32ae5c97117a87745c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa36ff15f-dc", "ovs_interfaceid": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.325373] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Acquired lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.325548] env[68279]: DEBUG nova.network.neutron [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Refreshing network info cache for port a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.327477] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:5c:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a36ff15f-dc24-4fe7-aaf1-66caad63a54f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.336345] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Creating folder: Project (9f54b17f9e714a32ae5c97117a87745c). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.337597] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fd902c9-4017-4bab-94cd-190bdfeb1cbc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.351459] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Created folder: Project (9f54b17f9e714a32ae5c97117a87745c) in parent group-v594445. [ 1004.351920] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Creating folder: Instances. Parent ref: group-v594693. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.352366] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7293c0af-ec20-4b8a-8c80-3d86eeafdff2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.364769] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Created folder: Instances in parent group-v594693. 
[ 1004.365207] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.365522] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.365860] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7c86979-0208-4336-90b2-4127e9c079fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.395237] env[68279]: DEBUG nova.compute.manager [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.396569] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c492628a-9670-40d6-900f-5777c9bf89b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.404752] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.404752] env[68279]: value = "task-2963532" [ 1004.404752] env[68279]: _type = "Task" [ 1004.404752] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.424677] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963532, 'name': CreateVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.611328] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963526, 'name': ReconfigVM_Task, 'duration_secs': 0.657983} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.611716] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 4090e245-b026-4d3a-b7f0-e61543701d8f/4090e245-b026-4d3a-b7f0-e61543701d8f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.612288] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a53a27e-d942-41d8-804b-41907b87731b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.619872] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1004.619872] env[68279]: value = "task-2963533" [ 1004.619872] env[68279]: _type = "Task" [ 1004.619872] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.630034] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963533, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.715260] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1004.744949] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.745281] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.745494] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.745702] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.745906] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.746078] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.746336] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.746554] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.746794] env[68279]: DEBUG 
nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.746979] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.747217] env[68279]: DEBUG nova.virt.hardware [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.748123] env[68279]: DEBUG nova.scheduler.client.report [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.752050] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5eeed8-bfd3-43d2-b513-2522094ab4a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.761903] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124d57ed-ea9e-4476-9c5e-42cff5f4f175 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.918066] env[68279]: INFO nova.compute.manager [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] instance snapshotting [ 1004.919729] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963532, 'name': CreateVM_Task, 'duration_secs': 0.469606} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.919957] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.920636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.920796] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.921222] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.923747] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbec7d6b-11de-4ba3-94c3-c8e377536ed8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.925884] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffc5449-1743-4119-80e5-61d8d3eaeb8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.932566] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1004.932566] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b6d46-5cbd-6464-724d-f1b50d9d1823" [ 1004.932566] env[68279]: _type = "Task" [ 1004.932566] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.951284] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d028668-b4e3-4bc6-afde-87c8b0a5da5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.966412] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522b6d46-5cbd-6464-724d-f1b50d9d1823, 'name': SearchDatastore_Task, 'duration_secs': 0.011517} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.966931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.967177] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.967852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.967852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.967852] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.968096] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27ceb299-feee-4571-8e5c-c0640b99c979 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.976516] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.976690] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.977454] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-430722c7-0e52-4975-a690-90d494885cb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.982892] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1004.982892] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524acc2b-8e1b-f290-4d22-c4bf824920b4" [ 1004.982892] env[68279]: _type = "Task" [ 1004.982892] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.994153] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524acc2b-8e1b-f290-4d22-c4bf824920b4, 'name': SearchDatastore_Task, 'duration_secs': 0.008857} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.995569] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-383c9325-8645-4a8c-8fe0-0ca6871422ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.002212] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1005.002212] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5274ccea-fe98-8005-7fcf-97f14318ca77" [ 1005.002212] env[68279]: _type = "Task" [ 1005.002212] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.010365] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5274ccea-fe98-8005-7fcf-97f14318ca77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.052659] env[68279]: DEBUG nova.network.neutron [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updated VIF entry in instance network info cache for port a36ff15f-dc24-4fe7-aaf1-66caad63a54f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.053017] env[68279]: DEBUG nova.network.neutron [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updating instance_info_cache with network_info: [{"id": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "address": "fa:16:3e:b4:5c:2f", "network": {"id": "7aa6c0b4-01c0-4b5e-b7c1-19824b0dde00", "bridge": "br-int", "label": "tempest-ServersTestJSON-597296579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f54b17f9e714a32ae5c97117a87745c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa36ff15f-dc", "ovs_interfaceid": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.130642] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963533, 'name': Rename_Task, 'duration_secs': 0.217977} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.130903] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.131170] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1d328e2-b081-4b38-9fe9-f4940db2de10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.137109] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1005.137109] env[68279]: value = "task-2963534" [ 1005.137109] env[68279]: _type = "Task" [ 1005.137109] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.144659] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963534, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.257463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.258026] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1005.261071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.517s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.261578] env[68279]: DEBUG nova.objects.instance [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lazy-loading 'resources' on Instance uuid 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.295546] env[68279]: DEBUG nova.compute.manager [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Received event network-vif-plugged-5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.297984] env[68279]: DEBUG oslo_concurrency.lockutils [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.298271] env[68279]: DEBUG oslo_concurrency.lockutils [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.298500] env[68279]: DEBUG oslo_concurrency.lockutils [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.298710] env[68279]: DEBUG nova.compute.manager [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] No waiting events found dispatching 
network-vif-plugged-5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1005.298916] env[68279]: WARNING nova.compute.manager [req-a218a036-66e7-4730-a621-e48531567c02 req-8e47c74d-931f-4fb8-88c3-a1810f789da0 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Received unexpected event network-vif-plugged-5959e66b-7a16-41ba-8c1b-adbc5941455e for instance with vm_state building and task_state spawning. [ 1005.394926] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Successfully updated port: 5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.468650] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1005.469521] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-02bd05bd-e565-44aa-8fe7-c9682c24a80a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.478114] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1005.478114] env[68279]: value = "task-2963535" [ 1005.478114] env[68279]: _type = "Task" [ 1005.478114] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.486207] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963535, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.511706] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5274ccea-fe98-8005-7fcf-97f14318ca77, 'name': SearchDatastore_Task, 'duration_secs': 0.009342} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.511920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.512188] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] efda54fe-09a3-4653-b16a-8b3cdd4849c5/efda54fe-09a3-4653-b16a-8b3cdd4849c5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.512448] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eea20297-9142-41e8-9963-119486f7ec47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.518703] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1005.518703] env[68279]: value = "task-2963536" [ 1005.518703] env[68279]: _type = "Task" [ 1005.518703] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.526314] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.556184] env[68279]: DEBUG oslo_concurrency.lockutils [req-b0861d62-2df2-4a2a-a7c9-279dbdf7d46e req-0c3e63ef-94bf-41b7-930b-1ca19e5b44bf service nova] Releasing lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.648896] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963534, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.765198] env[68279]: DEBUG nova.compute.utils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1005.771574] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Not allocating networking since 'none' was specified. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1005.898228] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.898419] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.898566] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.994643] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963535, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.028706] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963536, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497297} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.031399] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] efda54fe-09a3-4653-b16a-8b3cdd4849c5/efda54fe-09a3-4653-b16a-8b3cdd4849c5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.031619] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.032067] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3efd982d-77d8-4abb-8cd1-0c0ee9663c62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.038812] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1006.038812] env[68279]: value = "task-2963537" [ 1006.038812] env[68279]: _type = "Task" [ 1006.038812] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.048937] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963537, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.151312] env[68279]: DEBUG oslo_vmware.api [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963534, 'name': PowerOnVM_Task, 'duration_secs': 0.740454} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.151623] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.151835] env[68279]: INFO nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1006.152040] env[68279]: DEBUG nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1006.152890] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111dfd7a-08c4-4bda-aa14-b68997a5f013 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.189476] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40245e41-c740-46c5-935b-a301256204e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.198243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb29e1e6-9669-4256-9646-4b9f24bb759c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.233477] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3541714b-c5ed-433a-99f2-fc3d4e0e3368 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.242096] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cc47d0-d6c1-4813-8bbc-79480d68066b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.257110] env[68279]: DEBUG nova.compute.provider_tree [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.271733] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1006.489419] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963535, 'name': CreateSnapshot_Task, 'duration_secs': 0.811629} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.490148] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1006.490475] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffd6365-048f-48ac-bd50-67fd79dc4d0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.521087] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.550655] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086887} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.550937] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.551774] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc593dab-61aa-43d5-9c62-9d7cb438bdf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.579122] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] efda54fe-09a3-4653-b16a-8b3cdd4849c5/efda54fe-09a3-4653-b16a-8b3cdd4849c5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.579837] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adaf1e2b-8b25-4f3a-970e-2ae84144c066 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.603690] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1006.603690] env[68279]: value = "task-2963539" [ 1006.603690] env[68279]: _type = "Task" [ 1006.603690] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.617482] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.675091] env[68279]: INFO nova.compute.manager [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Took 36.85 seconds to build instance. 
[ 1006.760676] env[68279]: DEBUG nova.scheduler.client.report [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.808799] env[68279]: DEBUG nova.network.neutron [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating instance_info_cache with network_info: [{"id": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "address": "fa:16:3e:67:cf:e5", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5959e66b-7a", "ovs_interfaceid": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.009168] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1007.009516] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0e9a35e4-ab65-4150-936f-a3080a8b97d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.019077] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1007.019077] env[68279]: value = "task-2963540" [ 1007.019077] env[68279]: _type = "Task" [ 1007.019077] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.028207] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963540, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.119979] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963539, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.178936] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8cad72fd-65d3-48c0-9974-7e6124977795 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.364s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.266995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.269071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.958s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.270754] env[68279]: INFO nova.compute.claims [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.281280] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1007.288830] env[68279]: INFO nova.scheduler.client.report [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted allocations for instance 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab [ 1007.310749] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.311350] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance network_info: |[{"id": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "address": "fa:16:3e:67:cf:e5", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5959e66b-7a", "ovs_interfaceid": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1007.311489] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:cf:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5959e66b-7a16-41ba-8c1b-adbc5941455e', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.318936] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.321369] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1007.321500] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.322115] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1007.322115] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.322115] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1007.322304] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1007.322382] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1007.323417] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1007.323417] env[68279]: DEBUG nova.virt.hardware [None 
req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1007.323417] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1007.323417] env[68279]: DEBUG nova.virt.hardware [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1007.324409] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.324718] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac9fa58-98d3-45f2-bd5d-8ba0af34eab6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.328244] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb0a08e1-d3b6-4c46-9fd6-370fa9b35c96 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.344124] env[68279]: DEBUG nova.compute.manager [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Received event network-changed-5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.344332] env[68279]: DEBUG nova.compute.manager [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Refreshing instance network info cache due to event network-changed-5959e66b-7a16-41ba-8c1b-adbc5941455e. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.344545] env[68279]: DEBUG oslo_concurrency.lockutils [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] Acquiring lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.344687] env[68279]: DEBUG oslo_concurrency.lockutils [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] Acquired lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.344843] env[68279]: DEBUG nova.network.neutron [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Refreshing network info cache for port 5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.352369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24607dc8-b211-4601-80a5-1f2179890d9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.358016] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.358016] env[68279]: value = "task-2963541" [ 1007.358016] env[68279]: _type = "Task" [ 1007.358016] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.371398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.377796] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Creating folder: Project (2068f166c2ca4d86930fdbec92315a8b). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.377969] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ef0c442-0fea-4ef0-bab0-88fb00aa26b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.382866] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963541, 'name': CreateVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.392669] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Created folder: Project (2068f166c2ca4d86930fdbec92315a8b) in parent group-v594445. [ 1007.392876] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Creating folder: Instances. Parent ref: group-v594699. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.393195] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b998aac7-170e-4359-b50f-a6c9f6e5a752 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.404316] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Created folder: Instances in parent group-v594699. [ 1007.404539] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.404813] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.405091] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e51c9a6f-65ed-4243-ba83-d27e402caa50 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.433806] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.433806] env[68279]: value = "task-2963544" [ 1007.433806] env[68279]: _type = "Task" [ 1007.433806] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.442935] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963544, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.530369] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963540, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.546138] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4dd80f75-13d0-43d7-8042-b175dff50250" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.546418] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.546698] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.546910] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.547111] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.549532] env[68279]: INFO nova.compute.manager [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Terminating instance [ 1007.618721] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4090e245-b026-4d3a-b7f0-e61543701d8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.619147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.619492] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.619793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.620063] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.621711] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963539, 'name': ReconfigVM_Task, 'duration_secs': 0.9082} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.622167] env[68279]: INFO nova.compute.manager [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Terminating instance [ 1007.623538] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Reconfigured VM instance instance-00000059 to attach disk [datastore1] efda54fe-09a3-4653-b16a-8b3cdd4849c5/efda54fe-09a3-4653-b16a-8b3cdd4849c5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.625078] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82fb3551-ab8a-46ba-922b-a9e20e7e84ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.634102] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1007.634102] env[68279]: value = "task-2963545" [ 1007.634102] env[68279]: _type = "Task" [ 1007.634102] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.644429] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963545, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.798758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62ce9ec3-f645-4e25-8a54-2252756427e7 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "5c8d5c97-2b1c-4e43-86c1-9dfcd170faab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.837s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.868243] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963541, 'name': CreateVM_Task, 'duration_secs': 0.356823} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.868440] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.869120] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.869286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.869642] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1007.869889] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-389087a4-83c3-421b-bda1-10449367dd72 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.874495] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1007.874495] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528e9541-1e19-bdbc-ab13-c32ea54b1f4a" [ 1007.874495] env[68279]: _type = "Task" [ 1007.874495] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.883075] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528e9541-1e19-bdbc-ab13-c32ea54b1f4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.943637] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963544, 'name': CreateVM_Task, 'duration_secs': 0.298047} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.943823] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.944247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.034804] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963540, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.053767] env[68279]: DEBUG nova.compute.manager [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.053990] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.055030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0211c73a-e32e-48c8-881f-c2f256b07229 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.062916] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.063179] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c0ec526-37ad-42ef-8f51-0bdd3608e87c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.069637] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1008.069637] env[68279]: value = "task-2963546" [ 1008.069637] env[68279]: _type = "Task" [ 1008.069637] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.080473] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.129307] env[68279]: DEBUG nova.compute.manager [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.129624] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.130439] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dead34-1462-429a-aff3-67e1eca7fe4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.140247] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.140876] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b83f259c-57ed-49e1-95de-4cff71026aaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.145556] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963545, 'name': Rename_Task, 'duration_secs': 0.140534} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.145961] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.146227] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a16ebc2a-1a3a-48d7-a010-2ea95bca3bce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.150082] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1008.150082] env[68279]: value = "task-2963547" [ 1008.150082] env[68279]: _type = "Task" [ 1008.150082] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.154808] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1008.154808] env[68279]: value = "task-2963548" [ 1008.154808] env[68279]: _type = "Task" [ 1008.154808] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.161634] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963547, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.167029] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.172837] env[68279]: DEBUG nova.network.neutron [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updated VIF entry in instance network info cache for port 5959e66b-7a16-41ba-8c1b-adbc5941455e. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.173280] env[68279]: DEBUG nova.network.neutron [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating instance_info_cache with network_info: [{"id": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "address": "fa:16:3e:67:cf:e5", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5959e66b-7a", "ovs_interfaceid": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.384506] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528e9541-1e19-bdbc-ab13-c32ea54b1f4a, 'name': SearchDatastore_Task, 'duration_secs': 0.009911} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.387102] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.387276] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.387486] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.387646] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.387832] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.388975] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.388975] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.388975] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25bcb9c6-88eb-4bbc-9fe3-d3d9dc6fffc1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.390646] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f45be4-49d6-4a17-bd66-f355436c7fd2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.395756] env[68279]: DEBUG oslo_vmware.api [None 
req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1008.395756] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528538c1-99ff-f5e7-b660-b412afcd69f7" [ 1008.395756] env[68279]: _type = "Task" [ 1008.395756] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.403929] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.404144] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.407936] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27cb9398-d686-4a04-ad53-19662b806a7f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.410367] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528538c1-99ff-f5e7-b660-b412afcd69f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.417025] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1008.417025] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52415e3a-5729-2cf9-57c3-1bb58bb3a515" [ 1008.417025] env[68279]: _type = "Task" [ 1008.417025] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.423449] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52415e3a-5729-2cf9-57c3-1bb58bb3a515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.521260] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1008.521668] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594692', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'name': 'volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3763645-5a78-4929-98a3-108e72071211', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'serial': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1008.522594] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692a3519-6036-4f3b-9279-9e06cd3de40c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.535738] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963540, 'name': CloneVM_Task, 'duration_secs': 1.272078} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.546228] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Created linked-clone VM from snapshot [ 1008.549215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac33fbc6-ec95-4812-a0b5-60e28cc37beb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.551916] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3fb3ea-9e7f-41d0-8ac9-ddbed1b5a58d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.559105] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Uploading image 81dc2fe5-3e8d-4875-aecc-01c7eb0ae113 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1008.583329] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555/volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.586298] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 
tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1008.589073] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3367c304-13e3-4dab-af88-8a439a73686c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.601447] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-defaf5d1-72af-40ea-beac-f2486b47daca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.607941] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963546, 'name': PowerOffVM_Task, 'duration_secs': 0.208343} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.609925] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.610263] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.610531] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1008.610531] env[68279]: value = "task-2963549" [ 1008.610531] env[68279]: _type = "Task" [ 1008.610531] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.610759] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1008.610759] env[68279]: value = "task-2963550" [ 1008.610759] env[68279]: _type = "Task" [ 1008.610759] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.610941] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-711a93e2-aba0-4218-a819-f7c70301cfcc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.631923] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963550, 'name': Destroy_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.632269] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963549, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.665791] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963547, 'name': PowerOffVM_Task, 'duration_secs': 0.184825} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.667039] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.667039] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.667270] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-830f4e2f-9602-46cc-877a-72de03c867fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.671743] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963548, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.675659] env[68279]: DEBUG oslo_concurrency.lockutils [req-505f0220-b91c-4f69-9ffd-502e9d887527 req-b4ddabc9-305a-4c58-9366-c3c446c3c5a9 service nova] Releasing lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.690579] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6640337b-7b57-4cf1-9fb1-481a92cf4200 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.699119] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.699428] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.699663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.699854] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.700027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.701647] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.701860] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.702079] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleting the datastore file [datastore2] 4dd80f75-13d0-43d7-8042-b175dff50250 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.704588] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a644714a-3664-4982-853c-6d23759dc67b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.706866] env[68279]: INFO nova.compute.manager [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Terminating instance [ 1008.709322] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721c7437-2f07-4e78-ad30-12ca28378856 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.719579] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1008.719579] env[68279]: value = "task-2963553" [ 1008.719579] env[68279]: _type = "Task" [ 1008.719579] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.748824] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26934da3-88e2-4f4c-9f54-53c34170b2c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.752040] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.752132] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.752310] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleting the datastore file [datastore2] 4090e245-b026-4d3a-b7f0-e61543701d8f {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.752567] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbdbc7e4-3a58-44cc-8289-261bc12858fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.760040] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.763412] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1008.763412] env[68279]: value = "task-2963554" [ 1008.763412] env[68279]: _type = "Task" [ 1008.763412] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.764910] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b534dd-ae0c-4406-ade0-68da75fe7593 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.784968] env[68279]: DEBUG nova.compute.provider_tree [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.790137] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963554, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.908358] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528538c1-99ff-f5e7-b660-b412afcd69f7, 'name': SearchDatastore_Task, 'duration_secs': 0.018125} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.908796] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.909038] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.909275] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.926616] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52415e3a-5729-2cf9-57c3-1bb58bb3a515, 'name': SearchDatastore_Task, 'duration_secs': 0.009622} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.927172] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6cd7541-0be1-453d-a1e9-a2f70975a959 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.933066] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1008.933066] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52400707-a7aa-f9c8-bff7-e31c6b6ae152" [ 1008.933066] env[68279]: _type = "Task" [ 1008.933066] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.941835] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52400707-a7aa-f9c8-bff7-e31c6b6ae152, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.124529] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963549, 'name': ReconfigVM_Task, 'duration_secs': 0.404967} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.127952] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfigured VM instance instance-00000038 to attach disk [datastore1] volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555/volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.132132] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963550, 'name': Destroy_Task, 'duration_secs': 0.347022} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.132350] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00e49938-a725-4310-90c8-b82a8ff9577d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.142097] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Destroyed the VM [ 1009.142346] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1009.142593] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bb8250c8-5899-4441-87bf-2213a12f86c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.151187] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1009.151187] env[68279]: value = "task-2963555" [ 1009.151187] env[68279]: _type = "Task" [ 1009.151187] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.152382] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1009.152382] env[68279]: value = "task-2963556" [ 1009.152382] env[68279]: _type = "Task" [ 1009.152382] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.165012] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963555, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.168602] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963556, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.173127] env[68279]: DEBUG oslo_vmware.api [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963548, 'name': PowerOnVM_Task, 'duration_secs': 0.664041} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.173292] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.173669] env[68279]: INFO nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Took 7.03 seconds to spawn the instance on the hypervisor. [ 1009.174431] env[68279]: DEBUG nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1009.174568] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c3c5f9-933c-4cb6-aad7-485becf72e80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.216829] env[68279]: DEBUG nova.compute.manager [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.217149] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.218043] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eed6aa8-e853-4f90-b15c-83c02fe092f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.226686] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.227306] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b3797bb-47c4-49d5-b391-5c8a64c8671f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.234458] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 1009.234458] env[68279]: value = "task-2963557" [ 1009.234458] env[68279]: _type = "Task" [ 1009.234458] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.242477] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.251970] env[68279]: DEBUG oslo_vmware.api [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164653} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.252231] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.252418] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.252596] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.252767] env[68279]: INFO nova.compute.manager [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1009.253009] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.253213] env[68279]: DEBUG nova.compute.manager [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.253310] env[68279]: DEBUG nova.network.neutron [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.273349] env[68279]: DEBUG oslo_vmware.api [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168338} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.273613] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.273826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.274019] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.274193] env[68279]: INFO nova.compute.manager [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1009.274428] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.274615] env[68279]: DEBUG nova.compute.manager [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.274705] env[68279]: DEBUG nova.network.neutron [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.290997] env[68279]: DEBUG nova.scheduler.client.report [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.451770] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52400707-a7aa-f9c8-bff7-e31c6b6ae152, 'name': SearchDatastore_Task, 'duration_secs': 0.010527} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.452155] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.452495] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.452835] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.453167] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.453424] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2ff3c5f-57a3-4f69-ba33-82a5aaf9323c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.456041] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dc8a9a2-87b8-447d-a7a9-04f01a6c4efa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.463997] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1009.463997] env[68279]: value = "task-2963558" [ 1009.463997] env[68279]: _type = "Task" [ 1009.463997] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.469333] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.469844] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.474291] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b9098ff-cbd6-40e7-ada3-5ecb2a9c3862 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.476998] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.480070] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1009.480070] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f1cf-5de2-0622-bea4-506a351c483e" [ 1009.480070] env[68279]: _type = "Task" [ 1009.480070] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.489264] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f1cf-5de2-0622-bea4-506a351c483e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.652761] env[68279]: DEBUG nova.compute.manager [req-114a661e-65d4-478f-88de-d0e55f726aec req-ee82a135-9c29-4a28-85dc-9546c2357702 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Received event network-vif-deleted-d80b24ec-9d0e-4fd6-8503-b7081455b339 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.653350] env[68279]: INFO nova.compute.manager [req-114a661e-65d4-478f-88de-d0e55f726aec req-ee82a135-9c29-4a28-85dc-9546c2357702 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Neutron deleted interface d80b24ec-9d0e-4fd6-8503-b7081455b339; detaching it from the instance and deleting it from the info cache [ 1009.653637] env[68279]: DEBUG nova.network.neutron [req-114a661e-65d4-478f-88de-d0e55f726aec req-ee82a135-9c29-4a28-85dc-9546c2357702 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.668978] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963555, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.672875] env[68279]: DEBUG oslo_vmware.api [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963556, 'name': ReconfigVM_Task, 'duration_secs': 0.149395} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.674159] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594692', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'name': 'volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3763645-5a78-4929-98a3-108e72071211', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'serial': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1009.676796] env[68279]: DEBUG nova.compute.manager [req-233b9ea1-daf1-4453-b73c-43ffb0ccb84e req-2b3d5132-4eec-4d26-82ef-07639029edfc service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Received event network-vif-deleted-14ed552c-b208-40b2-969d-fea6c41a4a0d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.677013] env[68279]: INFO nova.compute.manager [req-233b9ea1-daf1-4453-b73c-43ffb0ccb84e req-2b3d5132-4eec-4d26-82ef-07639029edfc service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Neutron deleted interface 14ed552c-b208-40b2-969d-fea6c41a4a0d; detaching it from the instance and deleting it from the info cache [ 1009.677214] env[68279]: DEBUG nova.network.neutron [req-233b9ea1-daf1-4453-b73c-43ffb0ccb84e req-2b3d5132-4eec-4d26-82ef-07639029edfc service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.693374] env[68279]: INFO nova.compute.manager [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Took 38.11 seconds to build instance. [ 1009.746080] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963557, 'name': PowerOffVM_Task, 'duration_secs': 0.235785} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.746080] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.746080] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.746080] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a258f2b5-f2be-4c9b-8a44-08e6daf3a19a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.796869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.797491] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1009.801837] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.526s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.802108] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.804903] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.777s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.805293] env[68279]: DEBUG nova.objects.instance [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'resources' on Instance uuid 01404bab-6516-4783-8b9d-0738010b3e9b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.809496] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.809496] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.809496] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleting the datastore file [datastore2] e0afa3e5-4a40-4257-851c-3cccf57b1724 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.809931] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14886219-2bd4-4c6c-b556-88681603c1bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.817284] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 1009.817284] env[68279]: value = "task-2963560" [ 1009.817284] env[68279]: _type = "Task" [ 1009.817284] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.829743] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.836831] env[68279]: INFO nova.scheduler.client.report [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocations for instance 866eb440-4fc9-4708-8a3b-b53f2be3f6c8 [ 1009.975411] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963558, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.993180] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5288f1cf-5de2-0622-bea4-506a351c483e, 'name': SearchDatastore_Task, 'duration_secs': 0.007956} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.994082] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81a31a8b-6fa2-4c32-9629-65b821dbd17e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.001537] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1010.001537] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ce0658-2d57-1ca8-8f53-51df4efedbbe" [ 1010.001537] env[68279]: _type = "Task" [ 1010.001537] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.011162] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ce0658-2d57-1ca8-8f53-51df4efedbbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.019777] env[68279]: DEBUG nova.network.neutron [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.057795] env[68279]: DEBUG nova.network.neutron [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.159306] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9ec9e6d-7b89-4ad8-8c32-60809345ba41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.167350] env[68279]: DEBUG oslo_vmware.api [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963555, 'name': RemoveSnapshot_Task, 'duration_secs': 1.012642} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.168606] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1010.173808] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6062dd-5c54-4c20-a426-9550632dbff8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.187623] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee139b63-f5ca-442a-8d75-25cae4c73ec4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.197041] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac5d70c-cf44-4b9c-a6ef-36b247c85704 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.227942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-60fb7ee3-369f-4e14-a0a0-06242ab6e1a2 tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.652s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.228690] env[68279]: DEBUG nova.compute.manager [req-114a661e-65d4-478f-88de-d0e55f726aec req-ee82a135-9c29-4a28-85dc-9546c2357702 service nova] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Detach interface failed, port_id=d80b24ec-9d0e-4fd6-8503-b7081455b339, reason: Instance 4090e245-b026-4d3a-b7f0-e61543701d8f could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1010.260313] env[68279]: DEBUG nova.compute.manager [req-233b9ea1-daf1-4453-b73c-43ffb0ccb84e req-2b3d5132-4eec-4d26-82ef-07639029edfc service nova] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Detach interface failed, port_id=14ed552c-b208-40b2-969d-fea6c41a4a0d, reason: Instance 4dd80f75-13d0-43d7-8042-b175dff50250 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1010.303227] env[68279]: DEBUG nova.compute.utils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1010.304797] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1010.304967] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1010.308763] env[68279]: DEBUG nova.objects.instance [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'numa_topology' on Instance uuid 01404bab-6516-4783-8b9d-0738010b3e9b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.329747] env[68279]: DEBUG oslo_vmware.api [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315835} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.329919] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.333067] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.333067] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.333067] env[68279]: INFO nova.compute.manager [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1010.333067] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.333067] env[68279]: DEBUG nova.compute.manager [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.333067] env[68279]: DEBUG nova.network.neutron [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.344014] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9aa77272-7657-46ad-907c-de6004600388 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "866eb440-4fc9-4708-8a3b-b53f2be3f6c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.579s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.380475] env[68279]: DEBUG nova.policy [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1010.480522] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963558, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.511510] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ce0658-2d57-1ca8-8f53-51df4efedbbe, 'name': SearchDatastore_Task, 'duration_secs': 0.066237} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.511801] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.512080] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.512340] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2945813-d720-4d64-be08-d963be585aa9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.519569] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1010.519569] env[68279]: value = "task-2963561" [ 1010.519569] env[68279]: _type = "Task" [ 1010.519569] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.523177] env[68279]: INFO nova.compute.manager [-] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Took 1.25 seconds to deallocate network for instance. [ 1010.537477] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.560544] env[68279]: INFO nova.compute.manager [-] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Took 1.31 seconds to deallocate network for instance. [ 1010.686995] env[68279]: WARNING nova.compute.manager [None req-6c046b72-7f1a-4483-b352-c0fc0bd9fc66 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Image not found during snapshot: nova.exception.ImageNotFound: Image 81dc2fe5-3e8d-4875-aecc-01c7eb0ae113 could not be found. [ 1010.734104] env[68279]: DEBUG nova.objects.instance [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.807962] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1010.810793] env[68279]: DEBUG nova.objects.base [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Object Instance<01404bab-6516-4783-8b9d-0738010b3e9b> lazy-loaded attributes: resources,numa_topology {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1010.978156] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963558, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.031288] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.037317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.037649] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.070015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.182268] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Successfully created port: 4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1011.192866] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df893c5b-ab57-45bc-a586-d08014f350b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.203189] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e75f235-fd8b-491e-9306-d13ea484871e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.236306] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278d8a30-3c65-413e-ac08-a75050fa33bf {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.242295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-42f9e291-4750-4ff3-80b1-39756cc2ffbc tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.836s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.243132] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.205s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.243132] env[68279]: DEBUG nova.compute.manager [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.244052] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd0dddc-8e5c-42fa-9fd6-09ffb3963fdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.252166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e464b58b-0013-4ea4-8541-b410cd7960f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.259137] env[68279]: DEBUG nova.compute.manager [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1011.259855] env[68279]: DEBUG nova.objects.instance [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.277251] env[68279]: DEBUG nova.compute.provider_tree [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.339933] env[68279]: DEBUG nova.network.neutron [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.479334] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963558, 'name': CopyVirtualDisk_Task, 
'duration_secs': 1.640332} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.479708] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.479929] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.480206] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c64409d6-0d98-4348-8ba4-9daad5cdfd8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.488952] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1011.488952] env[68279]: value = "task-2963562" [ 1011.488952] env[68279]: _type = "Task" [ 1011.488952] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.498011] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963562, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.533227] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963561, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.697008] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "1bd92b53-46c0-4b63-be20-857cffed87cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.697366] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.697555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.697745] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.697912] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.700127] env[68279]: INFO nova.compute.manager [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Terminating instance [ 1011.748078] env[68279]: DEBUG nova.compute.manager [req-e98065d6-b9bd-48d1-9930-9d95085be89e req-8f63de66-da2d-45eb-84e2-ff78b8fbb4c0 service nova] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Received event network-vif-deleted-746b72fd-b408-4ee5-94ec-46790f1160f8 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.768314] env[68279]: DEBUG nova.compute.manager [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Received event network-changed-a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.768934] env[68279]: DEBUG nova.compute.manager [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Refreshing instance network info cache due to event 
network-changed-a36ff15f-dc24-4fe7-aaf1-66caad63a54f. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1011.768934] env[68279]: DEBUG oslo_concurrency.lockutils [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] Acquiring lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.768934] env[68279]: DEBUG oslo_concurrency.lockutils [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] Acquired lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.769541] env[68279]: DEBUG nova.network.neutron [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Refreshing network info cache for port a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.780622] env[68279]: DEBUG nova.scheduler.client.report [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.821091] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1011.845568] env[68279]: INFO nova.compute.manager [-] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Took 1.51 seconds to deallocate network for instance. 
[ 1011.858785] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.859048] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.859207] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.859402] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.859563] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.859703] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.859901] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.860193] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.860534] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.860768] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.860998] env[68279]: DEBUG nova.virt.hardware [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.861912] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8d4c99-1986-4d49-8a54-a80b338d10f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.872989] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d249fb4-45e2-410a-82ff-eb394f64efa4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.001565] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.243457} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.001984] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.002846] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a86943-f2e6-4a21-bb75-fcfe05fcebd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.031221] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.031534] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9644089a-b7e4-417f-95f6-1d82154e19a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.056670] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963561, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.193709} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.059510] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.059834] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.060033] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1012.060033] env[68279]: value = "task-2963563" [ 1012.060033] env[68279]: _type = "Task" [ 1012.060033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.060322] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1eb3e6e8-d41b-4028-9b25-e763d7380802 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.072936] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1012.072936] env[68279]: value = "task-2963564" [ 1012.072936] env[68279]: _type = "Task" [ 1012.072936] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.080586] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963564, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.204094] env[68279]: DEBUG nova.compute.manager [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1012.204379] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.205313] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b92555-5a47-4a80-87af-5d0c91bc3c20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.213369] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.213649] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c32d9502-decd-4f62-9a8d-ac62d70ab457 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.219702] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1012.219702] env[68279]: value = "task-2963565" [ 1012.219702] env[68279]: _type = "Task" [ 1012.219702] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.227909] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.272021] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.274602] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84d0a921-9c8e-4899-b00e-baa573d0d051 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.282251] env[68279]: DEBUG oslo_vmware.api [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1012.282251] env[68279]: value = "task-2963566" [ 1012.282251] env[68279]: _type = "Task" [ 1012.282251] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.287187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.481s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.289098] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.956s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.289286] env[68279]: DEBUG nova.objects.instance [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1012.297739] env[68279]: DEBUG oslo_vmware.api [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.354860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.505521] env[68279]: DEBUG nova.network.neutron [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updated VIF entry in instance network info cache for port a36ff15f-dc24-4fe7-aaf1-66caad63a54f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1012.505804] env[68279]: DEBUG nova.network.neutron [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updating instance_info_cache with network_info: [{"id": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "address": "fa:16:3e:b4:5c:2f", "network": {"id": "7aa6c0b4-01c0-4b5e-b7c1-19824b0dde00", "bridge": "br-int", "label": "tempest-ServersTestJSON-597296579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f54b17f9e714a32ae5c97117a87745c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa36ff15f-dc", "ovs_interfaceid": "a36ff15f-dc24-4fe7-aaf1-66caad63a54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.572164] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963563, 'name': ReconfigVM_Task, 'duration_secs': 0.321005} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.572444] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.573071] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7b0dd52-8ec1-4536-a3d6-1000d6532d8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.582557] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963564, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16842} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.583656] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.583656] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1012.583656] env[68279]: value = "task-2963567" [ 1012.583656] env[68279]: _type = "Task" [ 1012.583656] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.584234] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6668d8c6-f981-4e63-bd2e-bcb57d885e8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.594449] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963567, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.609717] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.610015] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c80480a-43c6-48a3-84cd-f89e9044ce3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.629423] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1012.629423] env[68279]: value = "task-2963568" [ 1012.629423] env[68279]: _type = "Task" [ 1012.629423] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.637348] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963568, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.730028] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963565, 'name': PowerOffVM_Task, 'duration_secs': 0.324964} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.730185] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.730357] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.730611] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74dc6e26-bab1-4a4a-889c-f5adbb9962a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.798020] env[68279]: DEBUG oslo_vmware.api [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963566, 'name': PowerOffVM_Task, 'duration_secs': 0.209782} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.801929] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.803023] env[68279]: DEBUG nova.compute.manager [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.803023] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.803023] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.803023] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleting the datastore file [datastore1] 1bd92b53-46c0-4b63-be20-857cffed87cd {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.803290] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bc803e37-de84-4ea1-abc9-e83d074e017a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 45.663s 
{{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.805195] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76832864-8659-40a6-baf2-1369366f1fe3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.808127] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7840c57-610d-4e71-bca5-a70607d13a52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.809873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 24.439s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.810116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.810328] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.810499] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.813470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cf6d4d-91c8-4176-92fd-064a24006b8e tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.524s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.814989] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.862s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.815220] env[68279]: DEBUG nova.objects.instance [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 
tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'resources' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.816922] env[68279]: INFO nova.compute.manager [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Terminating instance [ 1012.824872] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for the task: (returnval){ [ 1012.824872] env[68279]: value = "task-2963570" [ 1012.824872] env[68279]: _type = "Task" [ 1012.824872] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.837037] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.008668] env[68279]: DEBUG oslo_concurrency.lockutils [req-4e3a779f-ad00-4ae9-99bd-55c6381a8107 req-ea1a4fdb-c96d-42b9-b85c-e8d307aa9a23 service nova] Releasing lock "refresh_cache-efda54fe-09a3-4653-b16a-8b3cdd4849c5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.102399] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963567, 'name': Rename_Task, 'duration_secs': 0.140846} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.104085] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.104085] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8807367-e83b-4bed-beb6-93b73dcd59de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.109903] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1013.109903] env[68279]: value = "task-2963571" [ 1013.109903] env[68279]: _type = "Task" [ 1013.109903] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.118071] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963571, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.139064] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963568, 'name': ReconfigVM_Task, 'duration_secs': 0.263376} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.140189] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Reconfigured VM instance instance-0000005b to attach disk [datastore1] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1013.140825] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67245ab9-5bbd-4865-80be-8743d11f9ea8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.147235] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1013.147235] env[68279]: value = "task-2963572" [ 1013.147235] env[68279]: _type = "Task" [ 1013.147235] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.158845] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963572, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.317745] env[68279]: DEBUG nova.objects.instance [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'numa_topology' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.320485] env[68279]: DEBUG nova.compute.manager [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1013.320721] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1013.321017] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-301bc058-df5e-4f9d-930d-97061fc57a2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.328596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58dc2fc4-e97c-4c24-8505-8d22de91943f tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.086s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.337627] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227d4d22-5e59-48f4-ac6f-00e9d1b33c60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.354843] env[68279]: DEBUG oslo_vmware.api [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Task: {'id': task-2963570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126428} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.355137] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.355324] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.355500] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.355673] env[68279]: INFO nova.compute.manager [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1013.355911] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.356118] env[68279]: DEBUG nova.compute.manager [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1013.356216] env[68279]: DEBUG nova.network.neutron [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.373602] env[68279]: WARNING nova.virt.vmwareapi.vmops [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01404bab-6516-4783-8b9d-0738010b3e9b could not be found. [ 1013.373826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.374031] env[68279]: INFO nova.compute.manager [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1013.374267] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.374539] env[68279]: DEBUG nova.compute.manager [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1013.374625] env[68279]: DEBUG nova.network.neutron [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.564647] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Successfully updated port: 4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1013.619793] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963571, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.658240] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963572, 'name': Rename_Task, 'duration_secs': 0.169683} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.658712] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.658779] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd4e676c-7a0e-403b-900f-3b8e1c92bb82 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.668032] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1013.668032] env[68279]: value = "task-2963573" [ 1013.668032] env[68279]: _type = "Task" [ 1013.668032] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.675361] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963573, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.820535] env[68279]: DEBUG nova.objects.base [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1013.914912] env[68279]: DEBUG nova.compute.manager [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Received event network-vif-plugged-4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1013.915185] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Acquiring lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.915313] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.915481] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.915658] env[68279]: DEBUG nova.compute.manager [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 
req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] No waiting events found dispatching network-vif-plugged-4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1013.915658] env[68279]: WARNING nova.compute.manager [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Received unexpected event network-vif-plugged-4add0663-1007-446c-82db-6e177e9a9a8b for instance with vm_state building and task_state spawning. [ 1013.915658] env[68279]: DEBUG nova.compute.manager [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Received event network-changed-4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1013.916169] env[68279]: DEBUG nova.compute.manager [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Refreshing instance network info cache due to event network-changed-4add0663-1007-446c-82db-6e177e9a9a8b. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1013.916257] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Acquiring lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.916403] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Acquired lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.916559] env[68279]: DEBUG nova.network.neutron [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Refreshing network info cache for port 4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.069624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.121666] env[68279]: DEBUG oslo_vmware.api [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963571, 'name': PowerOnVM_Task, 'duration_secs': 0.767815} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.121935] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.122183] env[68279]: INFO nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1014.122360] env[68279]: DEBUG nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.123120] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e988bbb1-fee2-4c1b-9642-46ee79f68072 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.142320] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c36c21b-4d60-44f8-b0b6-60a1562db5dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.149013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384a3a35-15d0-421d-a244-cabeb5d23854 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.177795] env[68279]: DEBUG nova.objects.instance [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.183175] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236b2794-47da-48b0-8d40-37e8efdd1348 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.193742] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963573, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.196955] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3161db67-a354-43ca-b3c7-8c44f850b6d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.211135] env[68279]: DEBUG nova.compute.provider_tree [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.248357] env[68279]: DEBUG nova.network.neutron [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.264745] env[68279]: DEBUG nova.network.neutron [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.448207] env[68279]: DEBUG nova.network.neutron [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.524656] env[68279]: DEBUG nova.network.neutron [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.641216] env[68279]: INFO nova.compute.manager [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Took 38.38 seconds to build instance. 
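The repeated "_poll_task ... progress is N%" / "completed successfully" entries above (for example task-2963571 and task-2963573, both PowerOnVM_Task) come from a poll-until-done loop around vSphere tasks. The snippet below is a minimal illustrative sketch of that polling pattern only, not the oslo.vmware implementation; fetch_task_info() is a hypothetical stand-in for whatever call retrieves the task's current state and progress.

    # Illustrative sketch of the poll-until-done pattern behind the log entries above.
    # Not the oslo.vmware source; fetch_task_info() is a hypothetical callable that
    # returns e.g. {'state': 'running', 'progress': 66} for a vSphere-style task.
    import time

    def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info                      # corresponds to "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # corresponds to "Task: {...} progress is N%."
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

In the log, the same loop is visible as one "Waiting for the task" entry followed by periodic progress polls and a final entry that records the task's duration_secs.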
[ 1014.685264] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.685448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.685626] env[68279]: DEBUG nova.network.neutron [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1014.685799] env[68279]: DEBUG nova.objects.instance [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'info_cache' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.692885] env[68279]: DEBUG oslo_vmware.api [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963573, 'name': PowerOnVM_Task, 'duration_secs': 0.77359} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.693353] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.693578] env[68279]: INFO nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Took 7.41 seconds to spawn the instance on the hypervisor. 
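The oslo_concurrency.lockutils entries above follow a fixed pattern: "Acquiring lock X by Y", "Lock X acquired ... waited Ns", and later "Lock X released ... held Ns". The sketch below is a simplified analogue of that waited/held bookkeeping using only the standard library; it is not the oslo_concurrency implementation, and timed_lock() is an illustrative name.

    # Simplified analogue (not oslo_concurrency) of the waited/held timing that the
    # lockutils log entries report around each named lock.
    import contextlib
    import threading
    import time

    _locks = {}  # name -> threading.Lock; creation is not race-free, fine for a sketch

    @contextlib.contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" released :: held {held:.3f}s')

Reading the waited and held values in the entries above is the quickest way to spot lock contention, e.g. the "compute_resources" lock later in this section showing multi-second waits.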
[ 1014.693760] env[68279]: DEBUG nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.694612] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8342b783-5d3c-4fc3-ba5f-154b98078821 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.714988] env[68279]: DEBUG nova.scheduler.client.report [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.751394] env[68279]: INFO nova.compute.manager [-] [instance: 01404bab-6516-4783-8b9d-0738010b3e9b] Took 1.38 seconds to deallocate network for instance. [ 1014.768025] env[68279]: INFO nova.compute.manager [-] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Took 1.41 seconds to deallocate network for instance. [ 1015.027666] env[68279]: DEBUG oslo_concurrency.lockutils [req-edea0cb4-92ff-48a1-a9b9-20eb3b3cb9b3 req-fe1423de-fd6c-4daa-92c4-acb7723428b7 service nova] Releasing lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.028093] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.028257] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.145766] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9397c553-e95f-47f8-8773-75acf382d2bd tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.897s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.189647] env[68279]: DEBUG nova.objects.base [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1015.211656] 
env[68279]: INFO nova.compute.manager [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Took 36.18 seconds to build instance. [ 1015.219789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.405s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.224914] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.165s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.227067] env[68279]: INFO nova.compute.claims [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.274758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.559474] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.692059] env[68279]: DEBUG nova.network.neutron [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Updating instance_info_cache with network_info: [{"id": "4add0663-1007-446c-82db-6e177e9a9a8b", "address": "fa:16:3e:49:f2:b3", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4add0663-10", "ovs_interfaceid": "4add0663-1007-446c-82db-6e177e9a9a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.713996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5c627700-fa4e-4e56-a7ee-589b093c452a tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.692s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.736358] env[68279]: DEBUG oslo_concurrency.lockutils [None req-139101ed-ab51-4b9b-abf4-97bbb4969a8a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 44.300s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.737158] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 22.455s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.737572] env[68279]: INFO nova.compute.manager [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Unshelving [ 1015.787201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b5af9ce7-a1a7-465b-8196-e772f8ce11a9 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "01404bab-6516-4783-8b9d-0738010b3e9b" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.977s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.861885] env[68279]: INFO nova.compute.manager [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Rebuilding instance [ 1015.912135] env[68279]: DEBUG nova.compute.manager [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.913062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12d49c1-bcb1-4a4d-9929-7f0ee86ede98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.923733] env[68279]: DEBUG nova.compute.manager [req-620d3b7b-2deb-4284-ac0f-cd0a39c0a74e req-dbea0d7b-c503-4bec-8c16-48b7848489f2 service nova] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Received event network-vif-deleted-e77b7975-dc57-4094-9c2d-546c6bce9653 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.931177] env[68279]: DEBUG nova.network.neutron [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.948669] env[68279]: DEBUG nova.compute.manager [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Received event network-changed-5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.949125] env[68279]: DEBUG nova.compute.manager [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 
req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Refreshing instance network info cache due to event network-changed-5959e66b-7a16-41ba-8c1b-adbc5941455e. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1015.949917] env[68279]: DEBUG oslo_concurrency.lockutils [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] Acquiring lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.949917] env[68279]: DEBUG oslo_concurrency.lockutils [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] Acquired lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.949917] env[68279]: DEBUG nova.network.neutron [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Refreshing network info cache for port 5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.194771] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.195113] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance network_info: |[{"id": "4add0663-1007-446c-82db-6e177e9a9a8b", "address": "fa:16:3e:49:f2:b3", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4add0663-10", "ovs_interfaceid": "4add0663-1007-446c-82db-6e177e9a9a8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1016.195543] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:f2:b3', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4add0663-1007-446c-82db-6e177e9a9a8b', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.204124] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.204374] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.204619] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daff4a97-9f67-42d6-b47f-a6ee80f212b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.225688] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.225688] env[68279]: value = "task-2963574" [ 1016.225688] env[68279]: _type = "Task" [ 1016.225688] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.233977] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963574, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.433960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.585235] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052eb8e2-6fa9-4cc4-b67c-7d061ac8921b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.600295] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae176dd1-0837-44d9-a95e-c50c37599caf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.634244] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33f1c02-e77a-4a5c-a7ee-627014a96b76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.644237] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc81c1f-69af-4b13-be58-7af773e124d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.658351] env[68279]: DEBUG nova.compute.provider_tree [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.709284] env[68279]: DEBUG nova.network.neutron [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updated VIF entry in instance network info cache for port 5959e66b-7a16-41ba-8c1b-adbc5941455e. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.709739] env[68279]: DEBUG nova.network.neutron [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating instance_info_cache with network_info: [{"id": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "address": "fa:16:3e:67:cf:e5", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5959e66b-7a", "ovs_interfaceid": "5959e66b-7a16-41ba-8c1b-adbc5941455e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.737722] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963574, 'name': CreateVM_Task, 'duration_secs': 0.347838} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.737943] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.738908] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.739102] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.739452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.739714] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c7d3256-c6e1-490d-b27b-3b1a93cfafab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.745238] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1016.745238] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52330a00-61f2-e050-2a99-deafb9c35c48" [ 1016.745238] env[68279]: _type = "Task" [ 1016.745238] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.757657] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52330a00-61f2-e050-2a99-deafb9c35c48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.764033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.928191] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.928506] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-565aab14-42aa-4801-8916-b8ae40c6e5da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.935396] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1016.935396] env[68279]: value = "task-2963575" [ 1016.935396] env[68279]: _type = "Task" [ 1016.935396] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.946584] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963575, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.103537] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.103760] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.162506] env[68279]: DEBUG nova.scheduler.client.report [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.212731] env[68279]: DEBUG oslo_concurrency.lockutils [req-fadc39ae-c467-4fa8-b5f5-6439f7585db0 req-7240f9c3-109e-407d-bfd0-95d950a929a1 service nova] Releasing lock "refresh_cache-50f390b2-99b7-49f3-997f-7d7b50cff9f2" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.256072] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52330a00-61f2-e050-2a99-deafb9c35c48, 'name': SearchDatastore_Task, 'duration_secs': 0.011737} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.256382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.256619] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.256860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.257016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.257210] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.257480] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1142a745-c457-40fd-9085-e1a732b88b8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.265755] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.266012] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.266723] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac59c247-66e3-4974-953d-f8033fc3b7f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.271971] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1017.271971] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528622c0-9d80-673b-bc7f-65f2b79e6603" [ 1017.271971] env[68279]: _type = "Task" [ 1017.271971] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.279206] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528622c0-9d80-673b-bc7f-65f2b79e6603, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.441794] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.444932] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64e8714d-6c77-46c4-9af7-4d00bd0cc683 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.446410] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963575, 'name': PowerOffVM_Task, 'duration_secs': 0.239874} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.446657] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.446873] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1017.447873] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00abe2a2-2cc4-4bf9-a68c-f3460b6acad0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.451949] env[68279]: DEBUG oslo_vmware.api [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1017.451949] env[68279]: value = "task-2963576" [ 1017.451949] env[68279]: _type = "Task" [ 1017.451949] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.456478] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.456998] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7be6550-076b-4c85-8cd5-b93e3a24bea9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.461315] env[68279]: DEBUG oslo_vmware.api [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963576, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.484891] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.485206] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.485403] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Deleting the datastore file [datastore1] e1b12b1c-5755-41eb-b550-88c573a09877 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.485670] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9684a61d-22e0-4993-8d01-11d29461c412 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.491243] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1017.491243] env[68279]: value = "task-2963578" [ 1017.491243] env[68279]: _type = "Task" [ 1017.491243] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.499243] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963578, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.606453] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1017.668930] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.669581] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.672255] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.066s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.672541] env[68279]: DEBUG nova.objects.instance [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lazy-loading 'resources' on Instance uuid 99024851-0add-44b9-a70a-2e242180d6a9 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.782045] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528622c0-9d80-673b-bc7f-65f2b79e6603, 'name': SearchDatastore_Task, 'duration_secs': 0.007829} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.845225] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-873db93b-defc-45f5-a5e4-2f5286f8c0e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.845225] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1017.845225] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528eac28-8602-958c-f32c-f42773cb920e" [ 1017.845225] env[68279]: _type = "Task" [ 1017.845225] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.845225] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528eac28-8602-958c-f32c-f42773cb920e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.965277] env[68279]: DEBUG oslo_vmware.api [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963576, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.001504] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963578, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086786} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.001843] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.002059] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.002780] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.129313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.175658] env[68279]: DEBUG nova.compute.utils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1018.180403] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1018.180582] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1018.231059] env[68279]: DEBUG nova.policy [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655eae57bb1349c0a229c3b57f4d3446', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f947b60992d543c4b0bfee2553bfe357', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1018.300792] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528eac28-8602-958c-f32c-f42773cb920e, 'name': SearchDatastore_Task, 'duration_secs': 0.01041} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.301058] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.301319] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b/f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.301565] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5409bc0-5938-42cb-90db-566bdd7fcbb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.307711] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1018.307711] env[68279]: value = "task-2963579" [ 1018.307711] env[68279]: _type = "Task" [ 1018.307711] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.318803] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963579, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.462194] env[68279]: DEBUG oslo_vmware.api [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963576, 'name': PowerOnVM_Task, 'duration_secs': 0.641507} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.465701] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.465701] env[68279]: DEBUG nova.compute.manager [None req-9282ed2d-6cdc-48b2-ad09-08b190bf72d7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.466792] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbdddf4-17d1-4822-9a9e-13cd94ec348c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.566885] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f39bc57-f4bf-4098-9688-66751c705e4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.570455] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Successfully created port: 38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.578831] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918a5ac3-fa59-4243-9fda-eeb400af0ac1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.610056] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7757b8d-0fc6-4ac1-808c-de4880486ffd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.618501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c45bb8-b9b4-4042-ac61-05d3b8620e76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.634425] env[68279]: DEBUG nova.compute.provider_tree [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Inventory has not changed in ProviderTree for 
provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.681652] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.817898] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963579, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466628} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.818131] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b/f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1018.818351] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1018.818678] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d554353-621e-428e-828e-12eb28c1cd3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.824767] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1018.824767] env[68279]: value = "task-2963580" [ 1018.824767] env[68279]: _type = "Task" [ 1018.824767] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.833229] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963580, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.038680] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.038915] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.039165] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.039442] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.039648] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.039813] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.040027] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.040186] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.040344] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c 
tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.040693] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.040905] env[68279]: DEBUG nova.virt.hardware [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.041976] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d4fc7-b4d0-4045-a43e-10eee54df7eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.050444] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36ec99f-a0a5-44ee-ad5d-a81bd0c424a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.065738] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1019.071613] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.071871] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1019.072121] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9dace85-d027-42c3-ab7e-35b95671ae35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.089333] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1019.089333] env[68279]: value = "task-2963581" [ 1019.089333] env[68279]: _type = "Task" [ 1019.089333] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.098323] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963581, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.138051] env[68279]: DEBUG nova.scheduler.client.report [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.334206] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077888} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.334502] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.335270] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258b2a74-34d3-4405-a638-964b5450aab9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.360264] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b/f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.360597] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c856e0ad-c186-4b18-bc82-a663da478818 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.381582] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1019.381582] env[68279]: value = "task-2963582" [ 1019.381582] env[68279]: _type = "Task" [ 1019.381582] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.389676] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963582, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.600755] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963581, 'name': CreateVM_Task, 'duration_secs': 0.310003} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.600931] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.601452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.601645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.602031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1019.602324] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9bc92bd-a53c-4034-86bd-1b7791b843eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.607450] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1019.607450] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dd2fe4-4393-ba93-cac3-52aca1b788e3" [ 1019.607450] env[68279]: _type = "Task" [ 1019.607450] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.616677] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dd2fe4-4393-ba93-cac3-52aca1b788e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.645820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.647041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.369s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.648391] env[68279]: INFO nova.compute.claims [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.670134] env[68279]: INFO nova.scheduler.client.report [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Deleted allocations for instance 99024851-0add-44b9-a70a-2e242180d6a9 [ 1019.691282] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1019.714152] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.714152] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.714152] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.714152] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.714152] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.714641] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.714641] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.714641] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.714836] env[68279]: DEBUG 
nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.714836] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.715230] env[68279]: DEBUG nova.virt.hardware [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.715906] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca843226-03a6-4f58-9bbf-5b2de15fb5a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.724738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adc10ee-f8a0-419b-ad12-4661856a3c62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.892556] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963582, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.976035] env[68279]: DEBUG nova.compute.manager [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Received event network-vif-plugged-38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.976316] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.976527] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.976710] env[68279]: DEBUG oslo_concurrency.lockutils [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.976850] env[68279]: DEBUG nova.compute.manager [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] No waiting events found dispatching network-vif-plugged-38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.977017] env[68279]: WARNING nova.compute.manager [req-5a74f4a3-8392-4dba-8ca5-94f9c8d2e925 req-153b325a-7864-4ee0-8dea-8b31ba353b1d service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Received unexpected event network-vif-plugged-38ecf3bb-21fe-4683-8cc4-40e133bebe1f for instance with vm_state building and task_state spawning. [ 1020.067080] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Successfully updated port: 38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.117636] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dd2fe4-4393-ba93-cac3-52aca1b788e3, 'name': SearchDatastore_Task, 'duration_secs': 0.012125} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.117940] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.118192] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.118682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.118682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.118803] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.118992] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eeee881e-edcd-4b2f-9a32-2730d27e9ab9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.127642] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.127832] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.128554] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0578b679-22db-4231-83ee-b11897b88997 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.133675] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1020.133675] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f87ad4-1739-4cf4-3257-7f084d7322d4" [ 1020.133675] env[68279]: _type = "Task" [ 1020.133675] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.142575] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f87ad4-1739-4cf4-3257-7f084d7322d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.178459] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a270ecdd-fe2c-4edc-8981-914a0c5250c3 tempest-ServerShowV254Test-2100006249 tempest-ServerShowV254Test-2100006249-project-member] Lock "99024851-0add-44b9-a70a-2e242180d6a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.835s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.393374] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963582, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.572332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.572492] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.572653] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.648055] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f87ad4-1739-4cf4-3257-7f084d7322d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009019} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.649083] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c2b610c-a7f6-44e0-ab74-b0ad00989cd5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.654965] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1020.654965] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945c45-a2e7-1281-5991-c247291a98fc" [ 1020.654965] env[68279]: _type = "Task" [ 1020.654965] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.665425] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945c45-a2e7-1281-5991-c247291a98fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.896308] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963582, 'name': ReconfigVM_Task, 'duration_secs': 1.313986} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.899074] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Reconfigured VM instance instance-0000005d to attach disk [datastore2] f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b/f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.899915] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7fea954-a86d-46db-894f-30831fadfaa7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.906874] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1020.906874] env[68279]: value = "task-2963583" [ 1020.906874] env[68279]: _type = "Task" [ 1020.906874] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.920650] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963583, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.998029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7582309c-6926-46dc-80ee-b0240dab210e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.004903] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4da369-9835-4a89-a23a-739e82e33ddb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.039371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cade08d-debc-46d0-8e01-94ab7ea184bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.046714] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309afb8b-0962-402f-b381-5e50f56d86d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.061195] env[68279]: DEBUG nova.compute.provider_tree [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.105198] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.164459] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52945c45-a2e7-1281-5991-c247291a98fc, 'name': SearchDatastore_Task, 'duration_secs': 0.022151} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.164757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.164981] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.165245] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87d95e50-14b8-445c-91c3-c0a50227e3a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.170927] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1021.170927] env[68279]: value = "task-2963584" [ 1021.170927] env[68279]: _type = "Task" [ 1021.170927] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.181861] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963584, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.247812] env[68279]: DEBUG nova.network.neutron [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.417370] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963583, 'name': Rename_Task, 'duration_secs': 0.145397} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.417875] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.418068] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f25ca99-9015-4240-b2ab-f054a08300d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.424525] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1021.424525] env[68279]: value = "task-2963585" [ 1021.424525] env[68279]: _type = "Task" [ 1021.424525] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.433613] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963585, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.564815] env[68279]: DEBUG nova.scheduler.client.report [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.682425] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963584, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.751566] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.751914] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Instance network_info: |[{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.752605] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:49:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ecf3bb-21fe-4683-8cc4-40e133bebe1f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.761478] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.761732] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.761958] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9a55c18-b646-491c-937b-a85686e9abb9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.782258] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.782258] env[68279]: value = "task-2963586" [ 1021.782258] env[68279]: _type = "Task" [ 1021.782258] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.791149] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.934977] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963585, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.002608] env[68279]: DEBUG nova.compute.manager [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Received event network-changed-38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.002608] env[68279]: DEBUG nova.compute.manager [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Refreshing instance network info cache due to event network-changed-38ecf3bb-21fe-4683-8cc4-40e133bebe1f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1022.002700] env[68279]: DEBUG oslo_concurrency.lockutils [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] Acquiring lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.002783] env[68279]: DEBUG oslo_concurrency.lockutils [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] Acquired lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.003018] env[68279]: DEBUG nova.network.neutron [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Refreshing network info cache for port 38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.070631] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.071173] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.073731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.887s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.182444] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567774} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.182653] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.182936] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.183222] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53266ab0-0122-4919-9002-7df5cc032fba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.189613] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1022.189613] env[68279]: value = "task-2963587" [ 1022.189613] env[68279]: _type = "Task" [ 1022.189613] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.197914] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.292270] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.439084] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963585, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.577490] env[68279]: DEBUG nova.compute.utils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.589816] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Not allocating networking since 'none' was specified. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1022.696174] env[68279]: DEBUG nova.network.neutron [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updated VIF entry in instance network info cache for port 38ecf3bb-21fe-4683-8cc4-40e133bebe1f. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1022.696550] env[68279]: DEBUG nova.network.neutron [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.700785] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.262773} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.701268] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1022.702083] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456dfa0a-4b83-4d11-9eb3-3673f048d7b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.722460] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.723041] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44ef0a44-6e27-4021-9103-47c426423399 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.743816] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1022.743816] env[68279]: value = "task-2963588" [ 1022.743816] env[68279]: _type = "Task" [ 1022.743816] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.753088] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963588, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.791534] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.936403] env[68279]: DEBUG oslo_vmware.api [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963585, 'name': PowerOnVM_Task, 'duration_secs': 1.074573} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.936675] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.936887] env[68279]: INFO nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Took 11.12 seconds to spawn the instance on the hypervisor. [ 1022.937096] env[68279]: DEBUG nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.937892] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792d6471-50db-48c8-a1ef-8d584600c823 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.088057] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance fe92e176-222c-4c46-a254-1c12e21c68d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance cfaee7e2-6929-4d8c-8614-e19e0055f2fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c62a0d0e-8869-482a-a687-c628b96d6e22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e3763645-5a78-4929-98a3-108e72071211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119018] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7d15a05a-f827-40a7-b182-5d2b553481c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119297] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119297] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7e34039c-c51a-4f9c-961c-144f6d8a5130 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119462] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance eccc5882-2c8b-456d-bbd2-d9ed22777a77 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1023.119626] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.119759] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e0afa3e5-4a40-4257-851c-3cccf57b1724 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1023.119881] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
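[editor's note] The _remove_deleted_instances_allocations records immediately above and below show the resource tracker sorting each placement allocation against this node into one of three outcomes: keep it (instance actively managed here), warn and skip healing (allocation references this host but the instance is not managed here), or skip healing because the instance has been scheduled but has not started yet. The following is a minimal illustrative sketch of that decision, not Nova's actual code; the TrackedInstance record and classify_allocation name are hypothetical stand-ins for the real logic in nova/compute/resource_tracker.py, and the example values are copied from the records in this trace.

    # Illustrative sketch only -- simplified stand-in for the outcomes logged by
    # nova.compute.resource_tracker._remove_deleted_instances_allocations.
    # Names below are hypothetical, not Nova's real API.
    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class TrackedInstance:
        uuid: str
        host: Optional[str]   # compute host currently managing the instance, if any
        has_started: bool     # whether the build has actually begun on that host

    def classify_allocation(instance: Optional[TrackedInstance], this_host: str) -> str:
        """Return the outcome the tracker would log for one allocation on this node."""
        if instance is not None and instance.host == this_host:
            if instance.has_started:
                return "keep"              # "actively managed on this compute host ..."
            return "skip-not-started"      # "scheduled to this compute host ... Skipping heal"
        return "warn-skip"                 # "not being actively managed ... we do not know what to do"

    # Example mirroring two records from this trace (resource figures copied verbatim):
    alloc = {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}
    managed = TrackedInstance('e1b12b1c-5755-41eb-b550-88c573a09877', 'cpu-1', True)
    unknown = TrackedInstance('0d8f8797-649e-45de-8b3c-0b47e1d4cdd9', None, False)
    print(classify_allocation(managed, 'cpu-1'), alloc)   # keep
    print(classify_allocation(unknown, 'cpu-1'), alloc)   # warn-skip

The totals reported later in this update pass (used_ram=3776MB, used_vcpus=17, total allocated vcpus: 17) are consistent with seventeen such allocations of 192 MB / 1 VCPU each plus the 512 MB reserved in the provider inventory: 17 * 192 + 512 = 3776.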
[ 1023.120011] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e6f39528-384c-456b-8155-a6856bab3ce0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.120166] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 0731fdf9-f90c-46a4-9165-f6d91767b51b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.120330] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 1bd92b53-46c0-4b63-be20-857cffed87cd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1023.120494] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 4dd80f75-13d0-43d7-8042-b175dff50250 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1023.120631] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 4090e245-b026-4d3a-b7f0-e61543701d8f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1023.120750] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance efda54fe-09a3-4653-b16a-8b3cdd4849c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.120924] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 50f390b2-99b7-49f3-997f-7d7b50cff9f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.121105] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e1b12b1c-5755-41eb-b550-88c573a09877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.121227] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance a96ea5b4-39c5-4a24-873f-54480f876fbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.121339] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.121451] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f4963730-d516-48b7-a320-8af731831a30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.202571] env[68279]: DEBUG oslo_concurrency.lockutils [req-7f10a95e-a05c-474f-b996-4a75740d58ee req-e0202db5-1f6d-430f-a8b1-383f940b02a1 service nova] Releasing lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.254596] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963588, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.292249] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.457527] env[68279]: INFO nova.compute.manager [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Took 42.16 seconds to build instance. [ 1023.624714] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance daccaa30-1011-4c7d-a668-05f9329ab4d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.756901] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963588, 'name': ReconfigVM_Task, 'duration_secs': 0.995259} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.757189] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Reconfigured VM instance instance-0000005b to attach disk [datastore2] e1b12b1c-5755-41eb-b550-88c573a09877/e1b12b1c-5755-41eb-b550-88c573a09877.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.757795] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d6b5927-b267-4186-9a15-5e12de332b09 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.764532] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1023.764532] env[68279]: value = "task-2963589" [ 1023.764532] env[68279]: _type = "Task" [ 1023.764532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.774470] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963589, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.792827] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.959148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cfb45dbf-c4bd-43da-b6cd-17f2a7a6ebd5 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.555s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.097237] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.127304] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.127566] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.127731] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.128995] env[68279]: DEBUG 
nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.128995] env[68279]: DEBUG nova.virt.hardware [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.129733] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f38a489d-ddcb-4a66-bb60-058d46ed69db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1024.129995] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1024.130158] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1024.132888] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be056c1f-a220-42c9-89d9-58a70e7e19f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.141906] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81005fe4-c4f6-4486-ab54-082b87eb42cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.155803] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1024.161443] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Creating folder: Project (b31e2dcefa714b1ea0ff5164135a2439). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1024.164103] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9d28483-e881-42d1-a2d4-972910e25c8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.175509] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Created folder: Project (b31e2dcefa714b1ea0ff5164135a2439) in parent group-v594445. [ 1024.175763] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Creating folder: Instances. Parent ref: group-v594705. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1024.178440] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9be49531-6537-45f5-a073-f42e532dc81e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.189646] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Created folder: Instances in parent group-v594705. [ 1024.189897] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1024.192288] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1024.192718] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ca6e6c1-b60d-4544-8bfe-262eb4daf337 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.213397] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.213397] env[68279]: value = "task-2963592" [ 1024.213397] env[68279]: _type = "Task" [ 1024.213397] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.222761] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963592, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.280987] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963589, 'name': Rename_Task, 'duration_secs': 0.125592} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.281529] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.282121] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfc5a862-c70a-4e58-9fa9-a699ee720361 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.302796] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1024.302796] env[68279]: value = "task-2963593" [ 1024.302796] env[68279]: _type = "Task" [ 1024.302796] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.312891] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.321387] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963593, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.487574] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e3e371-d2e7-45d6-af99-401110b3a324 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.494954] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc01182-417e-4a78-982c-0e255c86d776 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.527413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766055b1-c87c-4409-a987-610930ae4510 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.534538] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65e8c31-c6c5-488f-97e1-a34a0499421c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.548227] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.722432] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963592, 'name': CreateVM_Task, 'duration_secs': 0.23845} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.722664] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.723023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.723191] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.723519] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.723775] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc186019-43b8-4062-8dd9-a6efd9df9406 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.728064] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1024.728064] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52470117-af90-0bbe-4ef8-d1fab5804e8a" [ 1024.728064] env[68279]: _type = "Task" [ 1024.728064] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.735434] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52470117-af90-0bbe-4ef8-d1fab5804e8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.797230] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.802813] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.803059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.803278] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.803465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.803636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.808971] env[68279]: INFO nova.compute.manager [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Terminating instance [ 1024.815405] env[68279]: DEBUG oslo_vmware.api [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963593, 'name': PowerOnVM_Task, 'duration_secs': 0.414229} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.815670] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.815916] env[68279]: DEBUG nova.compute.manager [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.817013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453331b8-6315-4fcf-97ed-8add096f99c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.051705] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.241696] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52470117-af90-0bbe-4ef8-d1fab5804e8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009434} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.241995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.242238] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.242472] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.242774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.242994] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1025.243237] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87c9c5e5-0a65-422e-ad13-471c506ef49d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.252503] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1025.252929] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1025.253751] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-996e2d46-9cad-4afb-9325-62867153c4e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.259475] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1025.259475] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523c4940-d762-f544-0d1f-45c4d324e8b5" [ 1025.259475] env[68279]: _type = "Task" [ 1025.259475] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.267052] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523c4940-d762-f544-0d1f-45c4d324e8b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.297314] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.313947] env[68279]: DEBUG nova.compute.manager [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1025.313947] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.316019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cef6f8-3142-441b-9ad3-7b5b3176795f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.321127] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.321373] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-594084f9-5467-4779-a7fe-e72614a54bac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.330621] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1025.330621] env[68279]: value = "task-2963594" [ 1025.330621] env[68279]: _type = "Task" [ 1025.330621] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.336484] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.342462] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.557191] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1025.557708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.484s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.557708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.798s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.557872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.560067] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.459s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.560281] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.562027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.525s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.562209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.564694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.494s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.564694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.565876] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.211s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.566084] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.568494] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.293s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.568494] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.569325] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.805s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.569566] env[68279]: DEBUG nova.objects.instance [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'pci_requests' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.601720] env[68279]: INFO nova.scheduler.client.report [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted allocations for instance 4090e245-b026-4d3a-b7f0-e61543701d8f [ 1025.605378] env[68279]: INFO nova.scheduler.client.report [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted allocations for instance e0afa3e5-4a40-4257-851c-3cccf57b1724 [ 1025.618179] env[68279]: INFO nova.scheduler.client.report [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted allocations for instance 4dd80f75-13d0-43d7-8042-b175dff50250 [ 1025.619853] env[68279]: INFO nova.scheduler.client.report [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted allocations for instance 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9 [ 1025.644599] env[68279]: INFO nova.scheduler.client.report [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted allocations for instance eccc5882-2c8b-456d-bbd2-d9ed22777a77 [ 1025.649471] env[68279]: INFO nova.scheduler.client.report [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Deleted allocations for instance 1bd92b53-46c0-4b63-be20-857cffed87cd [ 1025.700994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "e1b12b1c-5755-41eb-b550-88c573a09877" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.702406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.702406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "e1b12b1c-5755-41eb-b550-88c573a09877-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.702406] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.702406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.704269] env[68279]: INFO nova.compute.manager [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Terminating instance [ 1025.771045] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523c4940-d762-f544-0d1f-45c4d324e8b5, 'name': SearchDatastore_Task, 'duration_secs': 0.008919} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.771856] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac192dcd-be62-46e7-a812-8c0f110c6ce5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.777637] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1025.777637] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5226ba06-fc18-e820-2d62-bc159d9b65f0" [ 1025.777637] env[68279]: _type = "Task" [ 1025.777637] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.785765] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5226ba06-fc18-e820-2d62-bc159d9b65f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.797988] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963586, 'name': CreateVM_Task, 'duration_secs': 3.760256} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.798178] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1025.798860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.799040] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.799355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1025.799636] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-455f463c-689b-4115-86c6-82662c673018 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.804036] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1025.804036] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5270543e-c652-dea9-37e0-71a1e17e154a" [ 1025.804036] env[68279]: _type = "Task" [ 1025.804036] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.811716] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5270543e-c652-dea9-37e0-71a1e17e154a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.841063] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963594, 'name': PowerOffVM_Task, 'duration_secs': 0.173776} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.841330] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.841506] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.841761] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb27cc17-6797-460e-8904-14734efc163b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.907097] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.907354] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1025.907555] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore2] f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.907836] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8860608-6d84-463b-ac1f-b2e315fd3f18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.914050] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1025.914050] env[68279]: value = "task-2963596" [ 1025.914050] env[68279]: _type = "Task" [ 1025.914050] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.923463] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963596, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.076418] env[68279]: DEBUG nova.objects.instance [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'numa_topology' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.117871] env[68279]: DEBUG oslo_concurrency.lockutils [None req-36dfcbb3-8ba6-40b3-bcfc-558135a83d53 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4090e245-b026-4d3a-b7f0-e61543701d8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.499s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.119476] env[68279]: DEBUG oslo_concurrency.lockutils [None req-555b8a0c-44d4-4124-b905-b31ce8ee0002 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "e0afa3e5-4a40-4257-851c-3cccf57b1724" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.420s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.131640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97b9da6f-f6ac-4a42-9042-013f8f91a182 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "4dd80f75-13d0-43d7-8042-b175dff50250" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.585s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.132580] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8b01af11-873a-41c7-a0ee-dbc7f652ecba tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0d8f8797-649e-45de-8b3c-0b47e1d4cdd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.806s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.160040] env[68279]: DEBUG oslo_concurrency.lockutils [None req-24803d73-b43a-41ac-bdcb-845b954f47f3 tempest-ImagesTestJSON-849923966 tempest-ImagesTestJSON-849923966-project-member] Lock "1bd92b53-46c0-4b63-be20-857cffed87cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.463s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.164020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74bf302c-4dd7-460f-892e-1b2bf6bb01a2 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "eccc5882-2c8b-456d-bbd2-d9ed22777a77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.774s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.207910] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "refresh_cache-e1b12b1c-5755-41eb-b550-88c573a09877" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.208110] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquired lock "refresh_cache-e1b12b1c-5755-41eb-b550-88c573a09877" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.208294] env[68279]: DEBUG nova.network.neutron [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.288289] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5226ba06-fc18-e820-2d62-bc159d9b65f0, 'name': SearchDatastore_Task, 'duration_secs': 0.016835} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.288553] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.288813] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1026.289069] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96336bf3-155d-4992-8c74-10277a3e0c53 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.295588] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1026.295588] env[68279]: value = "task-2963597" [ 1026.295588] env[68279]: _type = "Task" [ 1026.295588] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.302938] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963597, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.313820] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5270543e-c652-dea9-37e0-71a1e17e154a, 'name': SearchDatastore_Task, 'duration_secs': 0.02346} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.314246] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.314514] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.316492] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.316492] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.316492] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.316492] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c7dfa6a-a33e-45d8-ae65-d696ca9e7b8c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.331115] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.331321] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.332388] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cec9773-e4a0-40ef-b02f-8fd1c6fc613c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.340949] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1026.340949] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca62d-3829-483f-3ec9-9d41d62fcaeb" [ 1026.340949] env[68279]: _type = "Task" [ 1026.340949] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.349606] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca62d-3829-483f-3ec9-9d41d62fcaeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.427218] env[68279]: DEBUG oslo_vmware.api [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427412} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.427218] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.427218] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.427218] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.427218] env[68279]: INFO nova.compute.manager [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1026.427720] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.427720] env[68279]: DEBUG nova.compute.manager [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1026.427720] env[68279]: DEBUG nova.network.neutron [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1026.582157] env[68279]: INFO nova.compute.claims [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.668224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.668224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.668224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.668224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.668224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.669787] env[68279]: INFO nova.compute.manager [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Terminating instance [ 1026.735262] env[68279]: DEBUG 
nova.network.neutron [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.806654] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963597, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.807578] env[68279]: DEBUG nova.network.neutron [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.829281] env[68279]: DEBUG nova.compute.manager [req-c799a64b-6ff2-4caa-b1f5-dd1c9bc41995 req-0956833e-3d69-4c65-98d7-59ed76934032 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Received event network-vif-deleted-4add0663-1007-446c-82db-6e177e9a9a8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.829510] env[68279]: INFO nova.compute.manager [req-c799a64b-6ff2-4caa-b1f5-dd1c9bc41995 req-0956833e-3d69-4c65-98d7-59ed76934032 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Neutron deleted interface 4add0663-1007-446c-82db-6e177e9a9a8b; detaching it from the instance and deleting it from the info cache [ 1026.829700] env[68279]: DEBUG nova.network.neutron [req-c799a64b-6ff2-4caa-b1f5-dd1c9bc41995 req-0956833e-3d69-4c65-98d7-59ed76934032 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.859521] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cca62d-3829-483f-3ec9-9d41d62fcaeb, 'name': SearchDatastore_Task, 'duration_secs': 0.012234} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.861334] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f24acd1-f183-46ae-9d09-a28e37a95221 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.871540] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1026.871540] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f9161-52c1-d18c-fdd5-e8d17da58ce3" [ 1026.871540] env[68279]: _type = "Task" [ 1026.871540] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.879677] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f9161-52c1-d18c-fdd5-e8d17da58ce3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.176554] env[68279]: DEBUG nova.compute.manager [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.176817] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.177763] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254d137b-58c1-475c-906f-d9a9999989bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.185323] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.185690] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac5dfed1-4875-4788-ac99-8ae038135157 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.191219] env[68279]: DEBUG oslo_vmware.api [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 1027.191219] env[68279]: value = "task-2963598" [ 1027.191219] env[68279]: _type = "Task" [ 1027.191219] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.204029] env[68279]: DEBUG oslo_vmware.api [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963598, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.234238] env[68279]: DEBUG nova.network.neutron [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.309792] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877989} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.310656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Releasing lock "refresh_cache-e1b12b1c-5755-41eb-b550-88c573a09877" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.311356] env[68279]: DEBUG nova.compute.manager [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.311715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.312247] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.312640] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.313581] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5f5d2-6a8a-4656-9f46-d4a8c8753b33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.317844] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d125d554-6bce-40ef-bc29-b2a452ba27d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.325350] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powering off the 
VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.327574] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f79ba27a-d27e-43b1-a322-94d1a10fe96f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.329939] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1027.329939] env[68279]: value = "task-2963599" [ 1027.329939] env[68279]: _type = "Task" [ 1027.329939] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.336217] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1027.336217] env[68279]: value = "task-2963600" [ 1027.336217] env[68279]: _type = "Task" [ 1027.336217] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.336561] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13b59e6c-7a96-49f3-adb5-ce668d172873 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.346491] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.356710] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2113c38-0b43-4e22-97f6-cdff41db633e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.368354] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.384323] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524f9161-52c1-d18c-fdd5-e8d17da58ce3, 'name': SearchDatastore_Task, 'duration_secs': 0.019444} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.407316] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.407977] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.408403] env[68279]: DEBUG nova.compute.manager [req-c799a64b-6ff2-4caa-b1f5-dd1c9bc41995 req-0956833e-3d69-4c65-98d7-59ed76934032 service nova] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Detach interface failed, port_id=4add0663-1007-446c-82db-6e177e9a9a8b, reason: Instance f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1027.408845] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5f12e34-4f11-4de7-8bcd-cd0a8285cecb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.417292] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1027.417292] env[68279]: value = "task-2963601" [ 1027.417292] env[68279]: _type = "Task" [ 1027.417292] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.429311] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963601, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.544770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.545315] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.589288] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "0b85c3a6-f413-49b1-9936-222117368995" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.590045] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.705594] env[68279]: DEBUG oslo_vmware.api [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963598, 'name': PowerOffVM_Task, 'duration_secs': 0.194466} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.709042] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.709042] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.709621] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d02c27c9-1b38-4aff-ad10-cb89b3d9c642 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.742499] env[68279]: INFO nova.compute.manager [-] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Took 1.31 seconds to deallocate network for instance. 
[ 1027.806967] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.807341] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.807439] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleting the datastore file [datastore2] a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.807987] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5aecda1-b3ea-4cec-a447-f79316952647 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.818184] env[68279]: DEBUG oslo_vmware.api [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for the task: (returnval){ [ 1027.818184] env[68279]: value = "task-2963603" [ 1027.818184] env[68279]: _type = "Task" [ 1027.818184] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.837523] env[68279]: DEBUG oslo_vmware.api [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963603, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.850015] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084231} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.850898] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.851816] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29f1ddf-e06c-468f-b9d5-4215c1f0db3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.859240] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963600, 'name': PowerOffVM_Task, 'duration_secs': 0.156901} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.859240] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.859240] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.860517] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78641f69-2a42-429f-b468-d4e219a0c96e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.881856] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.885510] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14d91b17-2899-41a9-8902-1f32f4062a16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.902743] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.902743] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Deleting contents of the VM from datastore datastore2 {{(pid=68279) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.902743] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Deleting the datastore file [datastore2] e1b12b1c-5755-41eb-b550-88c573a09877 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.903341] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-515b1c15-5c83-4ac3-902d-3b128a25c666 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.909472] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1027.909472] env[68279]: value = "task-2963605" [ 1027.909472] env[68279]: _type = "Task" [ 1027.909472] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.910925] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for the task: (returnval){ [ 1027.910925] env[68279]: value = "task-2963606" [ 1027.910925] env[68279]: _type = "Task" [ 1027.910925] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.927798] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963605, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.932124] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963606, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.936810] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473974} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.937118] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.937373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.937685] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb719bd6-5afb-443a-9a76-2136e75b8fcd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.944490] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1027.944490] env[68279]: value = "task-2963607" [ 1027.944490] env[68279]: _type = "Task" [ 1027.944490] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.958268] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963607, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.010186] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6580028-38a6-4744-bdfa-3d542bc109c4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.020510] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab59e3f5-3afd-4a41-9341-46bb4a7535d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.049988] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.053706] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5766d060-5dad-4da0-8d2f-5f367d21774f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.062293] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d5c91a-33cf-4750-a70c-4c67aa67d9fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.076712] env[68279]: DEBUG nova.compute.provider_tree [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.098471] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.197285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "e6f39528-384c-456b-8155-a6856bab3ce0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.197285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.197285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.197285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.197285] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.199992] env[68279]: INFO nova.compute.manager [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Terminating instance [ 1028.253914] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.277519] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.277938] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.278299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.278612] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.278902] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.286291] env[68279]: INFO nova.compute.manager [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Terminating instance [ 1028.327662] env[68279]: DEBUG oslo_vmware.api [None 
req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Task: {'id': task-2963603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232107} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.327922] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.328211] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.328358] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.328479] env[68279]: INFO nova.compute.manager [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1028.328711] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.328899] env[68279]: DEBUG nova.compute.manager [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.328987] env[68279]: DEBUG nova.network.neutron [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.423567] env[68279]: DEBUG oslo_vmware.api [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Task: {'id': task-2963606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116134} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.427867] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.427867] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.427867] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.427867] env[68279]: INFO nova.compute.manager [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1028.427867] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.427867] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963605, 'name': ReconfigVM_Task, 'duration_secs': 0.290856} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.428241] env[68279]: DEBUG nova.compute.manager [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.428241] env[68279]: DEBUG nova.network.neutron [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.430178] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Reconfigured VM instance instance-0000005e to attach disk [datastore1] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.431918] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a8f2450-40fa-4191-b49b-3e15c0847f8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.440348] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1028.440348] env[68279]: value = "task-2963608" [ 1028.440348] env[68279]: _type = "Task" [ 1028.440348] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.457625] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963608, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.458631] env[68279]: DEBUG nova.network.neutron [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.468678] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963607, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077029} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.470357] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.471783] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b375eb-0bd3-4b8f-8c4e-ccdb6eae7f30 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.494634] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.495306] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e70ca9d-eb75-4fcf-8d11-82cb49df9339 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.516919] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1028.516919] env[68279]: value = "task-2963609" [ 1028.516919] env[68279]: _type = "Task" [ 1028.516919] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.526900] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963609, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.578521] env[68279]: DEBUG nova.scheduler.client.report [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.586450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.622389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.711863] env[68279]: DEBUG nova.compute.manager [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1028.712114] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.713043] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5234d1ce-0751-4088-87e1-6aab13442859 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.724156] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.724451] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e5ef807-6363-4275-9896-0f0eee75e204 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.733885] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1028.733885] env[68279]: value = "task-2963610" [ 1028.733885] env[68279]: _type = "Task" [ 1028.733885] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.743608] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963610, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.751145] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.751391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.795708] env[68279]: DEBUG nova.compute.manager [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1028.795943] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.796870] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8194ecce-584f-44d3-b7bf-29fa9c58eba5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.806982] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.807377] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ae831b7-c1bb-4d08-b788-230dc2111a5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.816459] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1028.816459] env[68279]: value = "task-2963611" [ 1028.816459] env[68279]: _type = "Task" [ 1028.816459] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.825697] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.862230] env[68279]: DEBUG nova.compute.manager [req-cf04663e-8b76-47c8-b31e-14787dac88a9 req-9f8f114b-8693-4087-b82b-b79c7076612f service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Received event network-vif-deleted-efecef48-e3c5-47bc-8da4-6227052ad445 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.862404] env[68279]: INFO nova.compute.manager [req-cf04663e-8b76-47c8-b31e-14787dac88a9 req-9f8f114b-8693-4087-b82b-b79c7076612f service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Neutron deleted interface efecef48-e3c5-47bc-8da4-6227052ad445; detaching it from the instance and deleting it from the info cache [ 1028.862583] env[68279]: DEBUG nova.network.neutron [req-cf04663e-8b76-47c8-b31e-14787dac88a9 req-9f8f114b-8693-4087-b82b-b79c7076612f service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.952473] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963608, 'name': Rename_Task, 'duration_secs': 0.391207} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.952768] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.953081] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b4a7ea1-cbfb-4472-b09d-03fb116974e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.962723] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1028.962723] env[68279]: value = "task-2963612" [ 1028.962723] env[68279]: _type = "Task" [ 1028.962723] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.967765] env[68279]: DEBUG nova.network.neutron [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.975864] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963612, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.030530] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963609, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.088706] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.519s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.091271] env[68279]: DEBUG nova.network.neutron [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.092714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.963s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.093835] env[68279]: INFO nova.compute.claims [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.145570] env[68279]: INFO nova.network.neutron [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating port a047ea62-0c74-4967-820e-75553a4d8d7c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1029.245669] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963610, 'name': PowerOffVM_Task, 'duration_secs': 0.314788} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.245969] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.246138] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.246412] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-861d6923-5281-4db1-9d6a-1eafdf1ba4c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.253749] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1029.316057] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.316254] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.316442] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleting the datastore file [datastore2] e6f39528-384c-456b-8155-a6856bab3ce0 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.316756] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a089907d-d637-4e35-872b-21f281622fa7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.329743] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963611, 'name': PowerOffVM_Task, 'duration_secs': 0.229644} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.331122] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.331307] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.331615] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1029.331615] env[68279]: value = "task-2963614" [ 1029.331615] env[68279]: _type = "Task" [ 1029.331615] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.331844] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-873222e6-afc1-40dd-b788-0ec7651dfd69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.343403] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.365690] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-371b7722-4eca-455f-8078-1a3682cfb8b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.380190] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97b3d8b-5ca9-4021-b0a3-62a3faa05e9a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.394973] env[68279]: INFO nova.compute.manager [None req-64e23398-fcee-4fd1-87c6-c8d83620789f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Get console output [ 1029.395344] env[68279]: WARNING nova.virt.vmwareapi.driver [None req-64e23398-fcee-4fd1-87c6-c8d83620789f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] The console log is missing. Check your VSPC configuration [ 1029.424269] env[68279]: DEBUG nova.compute.manager [req-cf04663e-8b76-47c8-b31e-14787dac88a9 req-9f8f114b-8693-4087-b82b-b79c7076612f service nova] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Detach interface failed, port_id=efecef48-e3c5-47bc-8da4-6227052ad445, reason: Instance a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1029.425790] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.426090] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.426090] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleting the datastore file [datastore1] 0731fdf9-f90c-46a4-9165-f6d91767b51b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.426439] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1008723e-3679-4aa4-acf1-739fb8fc1591 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.439191] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for the task: (returnval){ [ 1029.439191] env[68279]: value = "task-2963616" [ 1029.439191] env[68279]: _type = "Task" [ 1029.439191] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.448151] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963616, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.472402] env[68279]: INFO nova.compute.manager [-] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Took 1.04 seconds to deallocate network for instance. [ 1029.472735] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963612, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.527913] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963609, 'name': ReconfigVM_Task, 'duration_secs': 0.590118} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.528255] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfigured VM instance instance-0000005c to attach disk [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.528913] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3834a13d-0d20-424f-b1b1-931633295f46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.536878] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1029.536878] env[68279]: value = "task-2963617" [ 1029.536878] env[68279]: _type = "Task" [ 1029.536878] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.546530] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963617, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.598881] env[68279]: INFO nova.compute.manager [-] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Took 1.27 seconds to deallocate network for instance. [ 1029.783113] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.844348] env[68279]: DEBUG oslo_vmware.api [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375537} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.844617] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.844809] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.844988] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.845177] env[68279]: INFO nova.compute.manager [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1029.845418] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.845617] env[68279]: DEBUG nova.compute.manager [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.845712] env[68279]: DEBUG nova.network.neutron [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.952168] env[68279]: DEBUG oslo_vmware.api [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Task: {'id': task-2963616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.422367} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.952449] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.952643] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.952860] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.953148] env[68279]: INFO nova.compute.manager [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1029.953400] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.953630] env[68279]: DEBUG nova.compute.manager [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.953726] env[68279]: DEBUG nova.network.neutron [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.973995] env[68279]: DEBUG oslo_vmware.api [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963612, 'name': PowerOnVM_Task, 'duration_secs': 0.513607} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.974224] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.974443] env[68279]: INFO nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Took 5.88 seconds to spawn the instance on the hypervisor. 
[ 1029.974629] env[68279]: DEBUG nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.975441] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b3943f-ffc9-4b68-913a-4fdbdf7823a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.978994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.051547] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963617, 'name': Rename_Task, 'duration_secs': 0.151361} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.051547] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.051547] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd8e6fdb-a001-4fec-b81b-5cd75f0f7613 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.057648] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1030.057648] env[68279]: value = "task-2963618" [ 1030.057648] env[68279]: _type = "Task" [ 1030.057648] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.066405] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963618, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.109399] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.452262] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97e59b0-ad08-4d18-8611-1fca2fd85d48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.460887] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99894f15-e548-4221-a65a-750d773cfdff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.504064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae95cda-a8c3-43cb-a6fd-299b7c6304f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.511690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "c62a0d0e-8869-482a-a687-c628b96d6e22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.511690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.511690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.512469] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.512469] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.514429] env[68279]: INFO nova.compute.manager [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Took 33.26 seconds to build instance. [ 1030.515894] env[68279]: INFO nova.compute.manager [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Terminating instance [ 1030.523923] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59e0444-3b5d-4411-9c4c-a5bea80cc376 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.545867] env[68279]: DEBUG nova.compute.provider_tree [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.572818] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963618, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.698573] env[68279]: DEBUG nova.compute.manager [req-b23fea33-6e85-4d1c-9c70-e28e4986e8ab req-8ed1364f-1318-461f-a681-c689729be00c service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Received event network-vif-deleted-ec6a474d-d634-499e-9236-c78e24a8792f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.698573] env[68279]: INFO nova.compute.manager [req-b23fea33-6e85-4d1c-9c70-e28e4986e8ab req-8ed1364f-1318-461f-a681-c689729be00c service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Neutron deleted interface ec6a474d-d634-499e-9236-c78e24a8792f; detaching it from the instance and deleting it from the info cache [ 1030.698743] env[68279]: DEBUG nova.network.neutron [req-b23fea33-6e85-4d1c-9c70-e28e4986e8ab req-8ed1364f-1318-461f-a681-c689729be00c service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.894839] env[68279]: DEBUG nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Received event network-vif-deleted-7aacc52c-518b-4424-b4a3-686a01b73bd9 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.895410] env[68279]: INFO nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Neutron deleted interface 7aacc52c-518b-4424-b4a3-686a01b73bd9; detaching it from the instance and deleting it from the info cache [ 1030.895678] env[68279]: DEBUG nova.network.neutron [req-a651d9d7-4fa6-4019-9fc3-089168ede261 
req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.908896] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.908896] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.908896] env[68279]: DEBUG nova.network.neutron [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.023192] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1a0671a1-8f5e-4525-9eea-037c62f29af2 tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.723s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.033458] env[68279]: DEBUG nova.compute.manager [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1031.033458] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.033458] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b395d43-3922-4fe6-8ce2-ceed4217f94e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.044875] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.046699] env[68279]: DEBUG nova.scheduler.client.report [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.050818] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c475fb3d-1cd0-4e1e-bc2d-d79a934e5043 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.054593] env[68279]: DEBUG nova.network.neutron [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.067972] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1031.067972] env[68279]: value = "task-2963619" [ 1031.067972] env[68279]: _type = "Task" [ 1031.067972] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.075749] env[68279]: DEBUG oslo_vmware.api [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963618, 'name': PowerOnVM_Task, 'duration_secs': 0.698701} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.076429] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.076649] env[68279]: INFO nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Took 11.39 seconds to spawn the instance on the hypervisor. [ 1031.076833] env[68279]: DEBUG nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.077733] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f55d56-db00-4490-9822-b87811dcafcc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.085299] env[68279]: DEBUG nova.network.neutron [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.090023] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963619, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.201725] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69e6aea5-3177-4bbe-8727-4c4d28a5c709 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.214816] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83d6b65-d5c9-4458-9dbb-40434c6831d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.252167] env[68279]: DEBUG nova.compute.manager [req-b23fea33-6e85-4d1c-9c70-e28e4986e8ab req-8ed1364f-1318-461f-a681-c689729be00c service nova] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Detach interface failed, port_id=ec6a474d-d634-499e-9236-c78e24a8792f, reason: Instance 0731fdf9-f90c-46a4-9165-f6d91767b51b could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1031.402038] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1637709-9c91-41f6-b47b-06097a809367 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.413435] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68e47cb-4082-45ce-a066-2b7e2fa9b3e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.459742] env[68279]: DEBUG nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Detach interface failed, port_id=7aacc52c-518b-4424-b4a3-686a01b73bd9, reason: Instance e6f39528-384c-456b-8155-a6856bab3ce0 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1031.460523] env[68279]: DEBUG nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.460956] env[68279]: DEBUG oslo_concurrency.lockutils [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.461370] env[68279]: DEBUG oslo_concurrency.lockutils [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.462026] env[68279]: DEBUG oslo_concurrency.lockutils [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.462026] env[68279]: DEBUG nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] No waiting events found dispatching network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.462447] env[68279]: WARNING nova.compute.manager [req-a651d9d7-4fa6-4019-9fc3-089168ede261 req-b7152868-b793-491d-9370-b181551ad150 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received unexpected event network-vif-plugged-a047ea62-0c74-4967-820e-75553a4d8d7c for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1031.543487] env[68279]: INFO nova.compute.manager [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Rebuilding instance [ 1031.554619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.555338] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.558232] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.222s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.558605] env[68279]: DEBUG nova.objects.instance [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1031.561465] env[68279]: INFO nova.compute.manager [-] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Took 1.61 seconds to deallocate network for instance. [ 1031.585131] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963619, 'name': PowerOffVM_Task, 'duration_secs': 0.459913} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.585820] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.586149] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.586523] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abf13371-1e2e-4274-a261-a352ed130623 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.588607] env[68279]: INFO nova.compute.manager [-] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Took 1.74 seconds to deallocate network for instance. [ 1031.606035] env[68279]: INFO nova.compute.manager [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Took 37.57 seconds to build instance. [ 1031.623296] env[68279]: DEBUG nova.compute.manager [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.624371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beab62a6-0d81-4b2e-b8b8-843421b64a63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.671869] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.672274] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.672587] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleting the datastore file [datastore2] c62a0d0e-8869-482a-a687-c628b96d6e22 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.673141] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e45908fd-8f79-4cea-a1c9-ccf1bcfe1ff3 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.681145] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1031.681145] env[68279]: value = "task-2963621" [ 1031.681145] env[68279]: _type = "Task" [ 1031.681145] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.690361] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.779595] env[68279]: DEBUG nova.network.neutron [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.064330] env[68279]: DEBUG nova.compute.utils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1032.068750] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1032.068920] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.073457] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.096761] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.110022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0494ca87-e2f8-47cc-8b9f-a31427285366 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.704s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.112187] env[68279]: DEBUG nova.policy [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.194697] env[68279]: DEBUG oslo_vmware.api [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193735} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.195644] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.195644] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.195644] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.195644] env[68279]: INFO nova.compute.manager [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1032.195945] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.196012] env[68279]: DEBUG nova.compute.manager [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1032.196362] env[68279]: DEBUG nova.network.neutron [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1032.285170] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b30906498d0b93576eb60339e333f93b',container_format='bare',created_at=2025-03-12T08:49:44Z,direct_url=,disk_format='vmdk',id=c23360f2-77b9-4b89-9c6d-1aafb5e74c2f,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-612171683-shelved',owner='b9d27076ab7348bb9ca331f4ff68e46f',properties=ImageMetaProps,protected=,size=31591424,status='active',tags=,updated_at=2025-03-12T08:50:00Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.314888] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1032.315351] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.316401] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.316591] env[68279]: DEBUG nova.virt.hardware [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.317507] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dda4d8-4ef9-4fdc-b8b2-5309065afa70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.329351] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29985c7c-4053-454c-ab23-892065589038 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.349906] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:3b:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a047ea62-0c74-4967-820e-75553a4d8d7c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.359730] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.360296] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.360529] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c1dcb31-7ca0-496a-a449-ee3a5e1a0a0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.384475] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.384475] env[68279]: value = "task-2963622" [ 1032.384475] env[68279]: _type = "Task" [ 1032.384475] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.395126] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963622, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.572841] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.578167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ae6e48ac-c1c0-469c-bb8d-c974b014399c tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.578488] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.325s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.578730] env[68279]: DEBUG nova.objects.instance [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.645853] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.646160] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e28e786-7e48-4fe3-b742-a3eca1f1a39b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.657296] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 
1032.657296] env[68279]: value = "task-2963623" [ 1032.657296] env[68279]: _type = "Task" [ 1032.657296] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.665792] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Successfully created port: f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.680351] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.901781] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963622, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.172781] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963623, 'name': PowerOffVM_Task, 'duration_secs': 0.14636} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.174243] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.176053] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.176053] env[68279]: DEBUG nova.compute.manager [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.176320] env[68279]: DEBUG nova.compute.manager [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing instance network info cache due to event network-changed-a047ea62-0c74-4967-820e-75553a4d8d7c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1033.176538] env[68279]: DEBUG oslo_concurrency.lockutils [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.176675] env[68279]: DEBUG oslo_concurrency.lockutils [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.176829] env[68279]: DEBUG nova.network.neutron [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Refreshing network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.180606] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d54abb-e1e3-4b68-9c0a-c97a2298ca12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.197750] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.197750] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16c8a289-c7d8-4453-9dcf-61fe51476917 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.240580] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1033.241232] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1033.241232] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Deleting the datastore file [datastore1] f4963730-d516-48b7-a320-8af731831a30 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.241443] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfe850de-2fde-4d6e-a684-97efa3a97ef4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.249777] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 
tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1033.249777] env[68279]: value = "task-2963625" [ 1033.249777] env[68279]: _type = "Task" [ 1033.249777] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.264402] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.398961] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963622, 'name': CreateVM_Task, 'duration_secs': 0.581788} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.401819] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.403202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.403202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.403422] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.403644] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9015af28-2b26-431f-81a5-e4d455c2be6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.410181] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1033.410181] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52516704-60eb-9365-04f0-27a7e2f71621" [ 1033.410181] env[68279]: _type = "Task" [ 1033.410181] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.425055] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52516704-60eb-9365-04f0-27a7e2f71621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.494333] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0643b2a-191a-4879-b0bc-b7ea8c475221 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.504876] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a263848e-4940-4e58-a3bf-78df01cdb8ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.540408] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a7c17e-6cb4-4cc4-91c4-0570ea361e2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.549386] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974e58b6-b6cf-4f8a-bd72-b5d3dfd9beaf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.564238] env[68279]: DEBUG nova.compute.provider_tree [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.588580] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.593209] env[68279]: DEBUG nova.network.neutron [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.619112] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.619630] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.619845] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.622415] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.622415] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.622415] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.622714] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.622879] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 
tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.623171] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.623439] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.626822] env[68279]: DEBUG nova.virt.hardware [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.626822] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366a9cef-43bb-474b-bda1-8d941ce1d37b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.648584] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2821851-0613-4d75-bf34-7b552a91aa6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.767715] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14212} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.767715] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.767715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.767715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.924573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.924793] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Processing image c23360f2-77b9-4b89-9c6d-1aafb5e74c2f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.925062] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.925217] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.925441] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.925731] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22ffa2b8-f5b1-4226-b4ec-cefb58d2631d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.936418] env[68279]: DEBUG nova.network.neutron 
[req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updated VIF entry in instance network info cache for port a047ea62-0c74-4967-820e-75553a4d8d7c. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.936523] env[68279]: DEBUG nova.network.neutron [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.940778] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.940778] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.941745] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-828f289d-c02a-4dc2-a442-5396c855726a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.950093] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1033.950093] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528e4133-99d1-bdde-6077-6f6f347da089" [ 1033.950093] env[68279]: _type = "Task" [ 1033.950093] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.961554] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528e4133-99d1-bdde-6077-6f6f347da089, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.069262] env[68279]: DEBUG nova.scheduler.client.report [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.096883] env[68279]: INFO nova.compute.manager [-] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Took 1.90 seconds to deallocate network for instance. [ 1034.143591] env[68279]: DEBUG nova.compute.manager [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1034.443130] env[68279]: DEBUG oslo_concurrency.lockutils [req-f8d5f446-e21d-40db-ac81-2f9fbc329832 req-340f6d62-8755-44cb-99dd-c6676e8b6ef9 service nova] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.461625] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1034.462034] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Fetch image to [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3/OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1034.462147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Downloading stream optimized image c23360f2-77b9-4b89-9c6d-1aafb5e74c2f to [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3/OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3.vmdk on the data store datastore2 as vApp {{(pid=68279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1034.462279] 
env[68279]: DEBUG nova.virt.vmwareapi.images [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Downloading image file data c23360f2-77b9-4b89-9c6d-1aafb5e74c2f to the ESX as VM named 'OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3' {{(pid=68279) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1034.467352] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Successfully updated port: f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.569950] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1034.569950] env[68279]: value = "resgroup-9" [ 1034.569950] env[68279]: _type = "ResourcePool" [ 1034.569950] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1034.572457] env[68279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7f67b02a-e38b-43bd-b665-0370b30692a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.587474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.590173] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.004s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.591721] env[68279]: INFO nova.compute.claims [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.601438] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lease: (returnval){ [ 1034.601438] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1034.601438] env[68279]: _type = "HttpNfcLease" [ 1034.601438] env[68279]: } obtained for vApp import into resource pool (val){ [ 1034.601438] env[68279]: value = "resgroup-9" [ 1034.601438] env[68279]: _type = "ResourcePool" [ 1034.601438] env[68279]: }. 
{{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1034.601438] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the lease: (returnval){ [ 1034.601438] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1034.601438] env[68279]: _type = "HttpNfcLease" [ 1034.601438] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1034.605703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.607987] env[68279]: INFO nova.scheduler.client.report [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b [ 1034.611718] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1034.611718] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1034.611718] env[68279]: _type = "HttpNfcLease" [ 1034.611718] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1034.654356] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.654585] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.670588] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.715272] env[68279]: DEBUG oslo_concurrency.lockutils [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.715795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.802037] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1034.802468] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.802743] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.803019] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.803294] env[68279]: DEBUG nova.virt.hardware [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.804249] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b37c5bc-f63f-4d77-b8d2-905671dc9a3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.813393] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c192bd-7064-4a36-9e1c-a782a1484a37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.828026] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.834603] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.834987] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.835308] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac5eada1-964f-4218-841c-80e7b1194d43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.855059] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.855059] env[68279]: value = "task-2963627" [ 1034.855059] env[68279]: _type = "Task" [ 1034.855059] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.863665] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963627, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.968428] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.968428] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.968638] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.109847] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1035.109847] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1035.109847] env[68279]: _type = "HttpNfcLease" [ 1035.109847] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1035.119337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1c2965ad-934d-4965-8ae1-3418a5a91862 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.316s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.160115] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1035.206464] env[68279]: DEBUG nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Received event network-vif-deleted-21e228fc-8c90-47b4-ae9b-5e46f3ad748b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.206677] env[68279]: DEBUG nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Received event network-vif-plugged-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.206907] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Acquiring lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.207154] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.207359] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.207484] env[68279]: DEBUG nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] No waiting events found dispatching network-vif-plugged-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1035.207686] env[68279]: WARNING nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Received unexpected event network-vif-plugged-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 for instance with vm_state building and task_state spawning. [ 1035.207870] env[68279]: DEBUG nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Received event network-changed-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.208033] env[68279]: DEBUG nova.compute.manager [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Refreshing instance network info cache due to event network-changed-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1035.208199] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Acquiring lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.218704] env[68279]: INFO nova.compute.manager [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Detaching volume c6b7b8a8-0a05-44c0-8d92-a721c800ec19 [ 1035.255812] env[68279]: INFO nova.virt.block_device [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Attempting to driver detach volume c6b7b8a8-0a05-44c0-8d92-a721c800ec19 from mountpoint /dev/sdb [ 1035.256169] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1035.256529] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594679', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'name': 'volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'serial': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1035.257520] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4572df7f-7fb1-4a2d-b32e-1086eb8fc0d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.282484] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b61c1d-ea36-477d-aab5-6d6c51147af2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.290669] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8104a73d-e393-4def-9531-dc1cb8a0d927 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.311799] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036efef3-3906-4202-97d0-d262b4d78d05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.327584] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] The volume has not been displaced from its original location: [datastore2] volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19/volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1035.333217] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfiguring VM instance instance-00000042 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1035.333325] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3ba8841-1caf-48c4-b628-5331d305d0ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.354116] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1035.354116] env[68279]: value = "task-2963628" [ 1035.354116] env[68279]: _type = "Task" [ 1035.354116] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.365481] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963628, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.368870] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963627, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.501199] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.612056] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1035.612056] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1035.612056] env[68279]: _type = "HttpNfcLease" [ 1035.612056] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1035.620352] env[68279]: DEBUG nova.network.neutron [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Updating instance_info_cache with network_info: [{"id": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "address": "fa:16:3e:95:5e:70", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf603cc38-e1", "ovs_interfaceid": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.680431] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.872693] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.877829] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963627, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.889110] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0a8a4a-223a-47a8-9f72-354aea3aad9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.897155] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2cbf30-c2e5-4950-9330-5e64f3b43759 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.930133] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ea1d1e-ea36-4a54-8f22-3a2052ed1bf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.939850] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c381472-46b9-4f8c-8c6c-c17ef3a4f241 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.955143] env[68279]: DEBUG nova.compute.provider_tree [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.113705] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.113705] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1036.113705] env[68279]: _type = "HttpNfcLease" [ 1036.113705] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1036.123334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.124426] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Instance network_info: |[{"id": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "address": "fa:16:3e:95:5e:70", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf603cc38-e1", "ovs_interfaceid": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.124711] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Acquired lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.124927] env[68279]: DEBUG nova.network.neutron [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Refreshing network info cache for port f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.126325] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:5e:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.135807] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 
tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.137290] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.137447] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5208d5eb-c3c3-41df-bf24-aa10c1f9614e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.159078] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.159078] env[68279]: value = "task-2963629" [ 1036.159078] env[68279]: _type = "Task" [ 1036.159078] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.169443] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963629, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.367838] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963628, 'name': ReconfigVM_Task, 'duration_secs': 0.967383} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.370947] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Reconfigured VM instance instance-00000042 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1036.375407] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1c5f80a-9b6b-4344-a499-f5c6b484ed15 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.391501] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963627, 'name': CreateVM_Task, 'duration_secs': 1.316589} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.392633] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.392989] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1036.392989] env[68279]: value = "task-2963630" [ 1036.392989] env[68279]: _type = "Task" [ 1036.392989] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.393377] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.393533] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.393872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.394187] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b045b78-e131-4475-90d0-899a4ede7cb2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.403190] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1036.403190] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dacae4-ee8d-b303-0975-513ec9b7bf59" [ 1036.403190] env[68279]: _type = "Task" [ 1036.403190] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.406694] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963630, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.415577] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dacae4-ee8d-b303-0975-513ec9b7bf59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.459046] env[68279]: DEBUG nova.scheduler.client.report [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.614792] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.614792] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1036.614792] env[68279]: _type = "HttpNfcLease" [ 1036.614792] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1036.615168] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1036.615168] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a70f47-8f44-7f37-9363-1250368d8b82" [ 1036.615168] env[68279]: _type = "HttpNfcLease" [ 1036.615168] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1036.615883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39486bb3-1fb1-45b4-b7e0-865911813ca0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.628512] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1036.628834] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating HTTP connection to write to file with size = 31591424 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk. 
{{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1036.708031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "525e4894-a8b1-45ae-a846-84ded8d97584" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.708031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.715767] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6ea1417f-e226-4296-9a8d-c3393a24650c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.721957] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963629, 'name': CreateVM_Task, 'duration_secs': 0.52823} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.722573] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.723869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.906276] env[68279]: DEBUG oslo_vmware.api [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963630, 'name': ReconfigVM_Task, 'duration_secs': 0.159735} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.907208] env[68279]: DEBUG nova.network.neutron [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Updated VIF entry in instance network info cache for port f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.907565] env[68279]: DEBUG nova.network.neutron [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Updating instance_info_cache with network_info: [{"id": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "address": "fa:16:3e:95:5e:70", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf603cc38-e1", "ovs_interfaceid": "f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.909316] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594679', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'name': 'volume-c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19', 'serial': 'c6b7b8a8-0a05-44c0-8d92-a721c800ec19'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1036.923094] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52dacae4-ee8d-b303-0975-513ec9b7bf59, 'name': SearchDatastore_Task, 'duration_secs': 0.034928} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.924194] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.924554] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.924805] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.924970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.925169] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.925466] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.925860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.926982] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56c30c16-8080-4603-9e27-8c747c246a89 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.930581] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-205943c0-f530-4422-a460-fa0cdbadad4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.939032] env[68279]: DEBUG oslo_vmware.api [None 
req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1036.939032] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c3546f-d289-3457-5ffa-b0a33dc02581" [ 1036.939032] env[68279]: _type = "Task" [ 1036.939032] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.943571] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.943763] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.944883] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf8b4e8c-2b26-46cc-a8b6-8f7ad352ddac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.950789] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c3546f-d289-3457-5ffa-b0a33dc02581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.954377] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1036.954377] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52017772-b30f-37cd-545e-d0b79d1731ff" [ 1036.954377] env[68279]: _type = "Task" [ 1036.954377] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.961890] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52017772-b30f-37cd-545e-d0b79d1731ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.965774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.966300] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.968808] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.347s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.970517] env[68279]: INFO nova.compute.claims [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.208899] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1037.413891] env[68279]: DEBUG oslo_concurrency.lockutils [req-e79a6c18-65e3-43c8-bb9d-66a21f9739ea req-fa399b7e-fe37-45bb-a9c9-6c471c6de1cf service nova] Releasing lock "refresh_cache-f38a489d-ddcb-4a66-bb60-058d46ed69db" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.453313] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c3546f-d289-3457-5ffa-b0a33dc02581, 'name': SearchDatastore_Task, 'duration_secs': 0.013075} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.455510] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.455798] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.456043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.465914] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52017772-b30f-37cd-545e-d0b79d1731ff, 'name': SearchDatastore_Task, 'duration_secs': 0.014039} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.467342] env[68279]: DEBUG nova.objects.instance [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'flavor' on Instance uuid c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.472282] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cc2902e-2d07-4fd3-a189-abd593b2b557 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.478750] env[68279]: DEBUG nova.compute.utils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1037.482565] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.482835] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.489254] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1037.489254] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ba46ae-1792-eccc-4fe8-e6928f500a54" [ 1037.489254] env[68279]: _type = "Task" [ 1037.489254] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.501203] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ba46ae-1792-eccc-4fe8-e6928f500a54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.525115] env[68279]: DEBUG nova.policy [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce3eaacf18f94d979400de2071e05ad5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cec84ec5eaf740cab9a1c56bfb9d6244', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.739060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.813174] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Successfully created port: b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.986875] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1038.005826] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ba46ae-1792-eccc-4fe8-e6928f500a54, 'name': SearchDatastore_Task, 'duration_secs': 0.012603} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.006121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.006401] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.006669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.006853] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.007097] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31b6fdba-536a-4ec5-8415-0b0cbdef42db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.010789] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc13559e-6326-44af-9978-cab09779c21a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.021459] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1038.021459] env[68279]: value = "task-2963631" [ 1038.021459] env[68279]: _type = "Task" [ 1038.021459] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.026379] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.026553] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.029106] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010f5e56-efd4-40f6-bfdd-7eaff90a2858 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.041158] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.050105] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1038.050105] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52731b3a-1cd4-2a8c-cb56-f6d1cd477ab6" [ 1038.050105] env[68279]: _type = "Task" [ 1038.050105] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.066548] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52731b3a-1cd4-2a8c-cb56-f6d1cd477ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.01147} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.067972] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1038.068067] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.069940] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4bdc73-92ee-4689-a747-71eafd683d0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.074421] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f6a4b7-c823-4c54-a79a-ae03be856dd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.084299] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.084423] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1038.084727] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1038.084727] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e96064-7f99-379f-34eb-5ca3996a9f85" [ 1038.084727] env[68279]: _type = "Task" [ 1038.084727] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.084919] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-aa43b74e-ebff-4e4b-9610-fa533ea3a9c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.095776] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e96064-7f99-379f-34eb-5ca3996a9f85, 'name': SearchDatastore_Task, 'duration_secs': 0.011695} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.098458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.098725] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f38a489d-ddcb-4a66-bb60-058d46ed69db/f38a489d-ddcb-4a66-bb60-058d46ed69db.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.099175] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80ee2fcc-fb94-4d5e-a3de-b8eff3c71c66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.106266] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1038.106266] env[68279]: value = "task-2963632" [ 1038.106266] env[68279]: _type = "Task" [ 1038.106266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.119403] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963632, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.346996] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0acae9f-9055-4b7f-95cb-3259e49cbe33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.359040] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71d26bb-0cd5-49af-8dc4-1cb09301839e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.392390] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2325ce3a-a34a-4bbb-bb76-09ea75e58ad8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.403598] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45872d4b-4e91-4ab6-8418-462c210df65d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.420805] env[68279]: DEBUG nova.compute.provider_tree [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.466200] env[68279]: DEBUG oslo_vmware.rw_handles [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b42304-b9b0-037a-e84c-c9fdc1b9c75a/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1038.466539] env[68279]: INFO nova.virt.vmwareapi.images [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Downloaded image file data c23360f2-77b9-4b89-9c6d-1aafb5e74c2f [ 1038.467493] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfb7e2f-e567-48af-88b6-f2f06a8d356e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.485038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-29cce25a-7f17-41fb-b1cf-e326ba8d827c tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.769s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.486583] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca45489d-be28-4cfc-a4a3-46323d72d9a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.534097] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963631, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.623227] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963632, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.668830] env[68279]: INFO nova.virt.vmwareapi.images [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] The imported VM was unregistered [ 1038.671712] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1038.671964] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.672836] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8218a108-c957-43d9-bfa7-6cc1e4261ff3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.709204] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Created directory with path [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.709491] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3/OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3.vmdk to [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk. {{(pid=68279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1038.709895] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c02617c5-f611-4c0b-b1bc-a272bf3fb3c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.719075] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1038.719075] env[68279]: value = "task-2963634" [ 1038.719075] env[68279]: _type = "Task" [ 1038.719075] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.729065] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.924572] env[68279]: DEBUG nova.scheduler.client.report [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.997151] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1039.023226] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=<?>,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-12T08:41:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.023494] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.023653] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.023833] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.023979] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.024144] env[68279]: 
DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.024384] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.024542] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.024751] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.024978] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.025187] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.026243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c441c8fc-2dc2-4f8f-9185-c713acf94d3f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.039987] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7413282-16c0-4c3d-af88-707340d9e876 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.048069] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518661} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.048810] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.048971] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.049291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11696c03-c3f5-43cd-b5f5-0b145d58617b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.067843] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1039.067843] env[68279]: value = "task-2963635" [ 1039.067843] env[68279]: _type = "Task" [ 1039.067843] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.080075] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963635, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.121078] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963632, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.777493} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.121390] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f38a489d-ddcb-4a66-bb60-058d46ed69db/f38a489d-ddcb-4a66-bb60-058d46ed69db.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.121620] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.121892] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d871091a-13b1-4bcf-a722-b865de889ef3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.134594] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1039.134594] env[68279]: value = "task-2963636" [ 1039.134594] env[68279]: _type = "Task" [ 1039.134594] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.147046] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963636, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.173778] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.173981] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.174208] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.174398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.174566] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.176740] env[68279]: INFO nova.compute.manager [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Terminating instance [ 1039.234064] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.246260] env[68279]: DEBUG nova.compute.manager [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Received event network-vif-plugged-b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1039.247100] env[68279]: DEBUG oslo_concurrency.lockutils [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] Acquiring lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.247100] env[68279]: DEBUG oslo_concurrency.lockutils [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.247100] env[68279]: DEBUG oslo_concurrency.lockutils [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.247825] env[68279]: DEBUG nova.compute.manager [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] No waiting events found dispatching network-vif-plugged-b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.247825] env[68279]: WARNING nova.compute.manager [req-22926a52-340b-4550-9e48-8465ccade4c8 req-ea9d8857-8fac-4595-883e-dcb8f7e721c7 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Received unexpected event network-vif-plugged-b0aaf790-98b7-4938-a783-54a408c8f485 for instance with vm_state building and task_state spawning. [ 1039.372245] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Successfully updated port: b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.431352] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.432028] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1039.434865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.652s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.437053] env[68279]: INFO nova.compute.claims [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.582567] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963635, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.652624] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.252279} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.653016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.653894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d335e8d8-f8a6-4bc7-a209-187722e08878 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.683529] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] f38a489d-ddcb-4a66-bb60-058d46ed69db/f38a489d-ddcb-4a66-bb60-058d46ed69db.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.684268] env[68279]: DEBUG nova.compute.manager [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.684462] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.684703] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ac6c3d3-bcfd-4ee3-81cc-458dc6ee782c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.700146] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca097d03-9d44-463e-af47-9bb2e9c65c7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.709538] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.709750] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5797d282-1a5a-4e11-9bdc-693237867f19 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.712663] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1039.712663] env[68279]: value = "task-2963637" [ 1039.712663] env[68279]: _type = "Task" [ 1039.712663] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.717712] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1039.717712] env[68279]: value = "task-2963638" [ 1039.717712] env[68279]: _type = "Task" [ 1039.717712] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.727988] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963637, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.731818] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963638, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.738220] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.873218] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.873403] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.873888] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.941466] env[68279]: DEBUG nova.compute.utils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.945016] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.945207] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.985790] env[68279]: DEBUG nova.policy [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce3eaacf18f94d979400de2071e05ad5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cec84ec5eaf740cab9a1c56bfb9d6244', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.083715] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.584042} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.083715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.083715] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff617a8-623c-4aa5-bbfb-adedfba1a032 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.115056] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.115320] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60f26c94-c0bf-40b7-9c88-aa918852f134 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.138546] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1040.138546] env[68279]: value = "task-2963639" [ 1040.138546] env[68279]: _type = "Task" [ 1040.138546] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.150408] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963639, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.225932] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963637, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.236208] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.239572] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963638, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.306678] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Successfully created port: 629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.430159] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.452610] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.652656] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963639, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.695621] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Updating instance_info_cache with network_info: [{"id": "b0aaf790-98b7-4938-a783-54a408c8f485", "address": "fa:16:3e:38:6a:c9", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0aaf790-98", "ovs_interfaceid": "b0aaf790-98b7-4938-a783-54a408c8f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.737714] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963637, 'name': ReconfigVM_Task, 'duration_secs': 0.926858} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.737978] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963638, 'name': PowerOffVM_Task, 'duration_secs': 0.900123} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.738814] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Reconfigured VM instance instance-0000005f to attach disk [datastore2] f38a489d-ddcb-4a66-bb60-058d46ed69db/f38a489d-ddcb-4a66-bb60-058d46ed69db.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.739600] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.740250] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.740250] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a950f77c-c643-4e89-8764-21c04d7965a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.742034] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d27f0f7c-80f7-4665-bb51-a62b7e47989c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.748284] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.756055] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1040.756055] env[68279]: value = "task-2963640" [ 1040.756055] env[68279]: _type = "Task" [ 1040.756055] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.765496] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963640, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.819923] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.820185] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.820372] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleting the datastore file [datastore1] c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.820652] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f4459b1-0a83-402b-87cc-c37b78d753c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.830872] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1040.830872] env[68279]: value = "task-2963642" [ 1040.830872] env[68279]: _type = "Task" [ 1040.830872] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.841796] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.862265] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4009f4-e1de-4284-8595-dd51b5026660 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.871785] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd7a38b-9ed8-43c8-97d1-231be6e32248 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.906463] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2704509-e147-4812-82d5-af3c5f2e8cb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.915962] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b125efd9-14ff-454c-923b-eb827e71b5fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.933997] env[68279]: DEBUG nova.compute.provider_tree [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.150774] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963639, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.198818] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.198957] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Instance network_info: |[{"id": "b0aaf790-98b7-4938-a783-54a408c8f485", "address": "fa:16:3e:38:6a:c9", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0aaf790-98", "ovs_interfaceid": "b0aaf790-98b7-4938-a783-54a408c8f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1041.199515] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:6a:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee617cec-01ea-4a11-ac04-ef9767f4c86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0aaf790-98b7-4938-a783-54a408c8f485', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.207278] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.207519] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.207779] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c373f83-a44d-4066-baa2-04a359a64397 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.231153] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.231153] env[68279]: value = "task-2963643" [ 1041.231153] env[68279]: _type = "Task" [ 1041.231153] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.237930] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963634, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.515058} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.238523] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3/OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3.vmdk to [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk. [ 1041.238728] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Cleaning up location [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1041.238871] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2cb511bd-0190-423f-9b0b-69841d0fefa3 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.239147] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0944992e-3b2c-42cb-905d-66c1c91e0fe6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.243626] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963643, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.248821] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1041.248821] env[68279]: value = "task-2963644" [ 1041.248821] env[68279]: _type = "Task" [ 1041.248821] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.258954] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.268048] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963640, 'name': Rename_Task, 'duration_secs': 0.372331} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.269194] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.269194] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53284d14-3eef-4951-9810-c870366231f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.279772] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1041.279772] env[68279]: value = "task-2963645" [ 1041.279772] env[68279]: _type = "Task" [ 1041.279772] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.297507] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.322099] env[68279]: DEBUG nova.compute.manager [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Received event network-changed-b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.322316] env[68279]: DEBUG nova.compute.manager [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Refreshing instance network info cache due to event network-changed-b0aaf790-98b7-4938-a783-54a408c8f485. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1041.322534] env[68279]: DEBUG oslo_concurrency.lockutils [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] Acquiring lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.322682] env[68279]: DEBUG oslo_concurrency.lockutils [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] Acquired lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.322848] env[68279]: DEBUG nova.network.neutron [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Refreshing network info cache for port b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.344025] env[68279]: DEBUG oslo_vmware.api [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257764} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.344025] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.344025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.344025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.344025] env[68279]: INFO nova.compute.manager [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1041.344025] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.344025] env[68279]: DEBUG nova.compute.manager [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.344025] env[68279]: DEBUG nova.network.neutron [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.437523] env[68279]: DEBUG nova.scheduler.client.report [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.465803] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1041.494930] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.495128] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.496074] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.496074] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.496074] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.496074] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.496074] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.496329] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.496329] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.496788] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.497451] env[68279]: DEBUG nova.virt.hardware [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.499125] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23db4f7-53a7-4eba-8ec4-7d99f87bb2c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.509416] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88823dd-6af4-4f15-a88f-c6bbf900b701 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.649918] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963639, 'name': ReconfigVM_Task, 'duration_secs': 1.133723} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.650259] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Reconfigured VM instance instance-0000005e to attach disk [datastore2] f4963730-d516-48b7-a320-8af731831a30/f4963730-d516-48b7-a320-8af731831a30.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.650873] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57e91b2c-c01e-4d2f-a858-6a34546aca88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.657271] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1041.657271] env[68279]: value = "task-2963646" [ 1041.657271] env[68279]: _type = "Task" [ 1041.657271] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.665746] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963646, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.742482] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963643, 'name': CreateVM_Task, 'duration_secs': 0.440576} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.742629] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.743636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.743636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.744198] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.744198] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfdde595-f4e4-407f-a976-19b82abe3579 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.750911] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1041.750911] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca547a-4f4b-f08d-7b12-31cc1bcc4672" [ 1041.750911] env[68279]: _type = "Task" [ 1041.750911] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.764015] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca547a-4f4b-f08d-7b12-31cc1bcc4672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.766873] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037639} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.767132] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.767301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.767571] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk to [datastore2] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.767900] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed03f190-be2d-4011-bd6f-55fed362f404 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.775671] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1041.775671] env[68279]: value = "task-2963647" [ 1041.775671] env[68279]: _type = "Task" [ 1041.775671] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.787423] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.793109] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963645, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.942913] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.943421] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1041.946544] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.967s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.946812] env[68279]: DEBUG nova.objects.instance [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lazy-loading 'resources' on Instance uuid e1b12b1c-5755-41eb-b550-88c573a09877 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.115511] env[68279]: DEBUG nova.network.neutron [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Updated VIF entry in instance network info cache for port b0aaf790-98b7-4938-a783-54a408c8f485. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.115948] env[68279]: DEBUG nova.network.neutron [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Updating instance_info_cache with network_info: [{"id": "b0aaf790-98b7-4938-a783-54a408c8f485", "address": "fa:16:3e:38:6a:c9", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0aaf790-98", "ovs_interfaceid": "b0aaf790-98b7-4938-a783-54a408c8f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.123314] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Successfully updated port: 629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.169514] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963646, 'name': Rename_Task, 'duration_secs': 0.30676} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.169818] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.170105] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5494bc27-4d65-422e-95ca-f2b12ee93c3a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.179245] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1042.179245] env[68279]: value = "task-2963648" [ 1042.179245] env[68279]: _type = "Task" [ 1042.179245] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.189165] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.269730] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ca547a-4f4b-f08d-7b12-31cc1bcc4672, 'name': SearchDatastore_Task, 'duration_secs': 0.012661} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.271373] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.271373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.271373] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.271373] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.271373] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.276350] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3f71299-e8d0-4dbc-ad45-db8b3dc4f35a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.288223] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.290203] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.290400] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.291277] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb887766-c2e8-46ff-a2b0-0af0e21969a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.299654] env[68279]: DEBUG oslo_vmware.api [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963645, 'name': PowerOnVM_Task, 'duration_secs': 0.574309} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.301247] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.301456] env[68279]: INFO nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Took 8.71 seconds to spawn the instance on the hypervisor. [ 1042.301679] env[68279]: DEBUG nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.302097] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1042.302097] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5238c955-4aca-3672-2064-4a27b7648b33" [ 1042.302097] env[68279]: _type = "Task" [ 1042.302097] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.302866] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01668ce-59d5-4f82-8eb3-ee8c01ae0374 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.322516] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5238c955-4aca-3672-2064-4a27b7648b33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.454200] env[68279]: DEBUG nova.compute.utils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1042.455998] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1042.456225] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.459216] env[68279]: DEBUG nova.network.neutron [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.621011] env[68279]: DEBUG oslo_concurrency.lockutils [req-022afe7b-ef36-4302-8403-ba06d87f2ec1 req-4f80eceb-5220-4c53-a431-f5d95d10e7e6 service nova] Releasing lock "refresh_cache-594af7a0-1d0a-43ca-947a-8c5614a289d9" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.629950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.630183] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.630385] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Building network info cache for instance {{(pid=68279) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.694686] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963648, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.753537] env[68279]: DEBUG nova.policy [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49483f51e2634b0385fb11abca58ade1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34fd2747aeac4bcd9dd18075cf4ebd8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.793040] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.834032] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5238c955-4aca-3672-2064-4a27b7648b33, 'name': SearchDatastore_Task, 'duration_secs': 0.089917} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.840936] env[68279]: INFO nova.compute.manager [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Took 24.73 seconds to build instance. [ 1042.843093] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a294b865-eb7c-4908-8240-f504994b6569 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.854500] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1042.854500] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f7c975-1dd8-8b5c-3c27-8b76de22f93d" [ 1042.854500] env[68279]: _type = "Task" [ 1042.854500] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.869145] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f7c975-1dd8-8b5c-3c27-8b76de22f93d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.911924] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ea2d5e-6d0d-45fc-b558-a4f6366f320f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.927265] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca51b7a9-4427-4804-8c54-2cb37c995aaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.965770] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1042.968642] env[68279]: INFO nova.compute.manager [-] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Took 1.63 seconds to deallocate network for instance. [ 1042.971418] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bb6a91-aad7-45d3-8fea-0b11e6bf4752 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.987729] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f5314f-ddf6-451f-85d0-75cfd1627c84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.007778] env[68279]: DEBUG nova.compute.provider_tree [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.113466] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Successfully created port: 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.133242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "7d15a05a-f827-40a7-b182-5d2b553481c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.133673] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.133932] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.134147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.134326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.141281] env[68279]: INFO nova.compute.manager [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Terminating instance [ 1043.167913] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.197318] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963648, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.291857] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task} progress is 63%. 
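(Illustrative aside.) The terminate path above shows oslo.concurrency's usual bookkeeping: a per-instance lock for do_terminate_instance and a separate "<uuid>-events" lock for clearing queued events, each logged with waited/held timings. A hedged sketch of the two lockutils styles that appear to produce those DEBUG lines follows; this is not the actual ComputeManager code.

```python
# Hedged sketch: the two oslo.concurrency locking styles that appear to produce
# the DEBUG lines seen above (not Nova's actual terminate_instance code).
from oslo_concurrency import lockutils

instance_uuid = "7d15a05a-f827-40a7-b182-5d2b553481c7"  # example from the log

# Decorator form: logs '... acquired by "..." :: waited Ns' / ':: held Ns'.
@lockutils.synchronized(instance_uuid)
def do_terminate_instance():
    clear_events_for_instance()

# Context-manager form: logs plain "Acquiring/Acquired/Releasing lock ..." lines.
def clear_events_for_instance():
    with lockutils.lock(f"{instance_uuid}-events"):
        pass  # drop any queued external events for this instance

do_terminate_instance()
```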
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.319573] env[68279]: DEBUG nova.network.neutron [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Updating instance_info_cache with network_info: [{"id": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "address": "fa:16:3e:8b:e3:25", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629e5271-6f", "ovs_interfaceid": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.348254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78cc3d43-eea1-493b-9f72-b2f3859c4794 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.244s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.370605] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f7c975-1dd8-8b5c-3c27-8b76de22f93d, 'name': SearchDatastore_Task, 'duration_secs': 0.082188} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.371188] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.371588] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 594af7a0-1d0a-43ca-947a-8c5614a289d9/594af7a0-1d0a-43ca-947a-8c5614a289d9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.373448] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c17b151-7396-4b96-b6ed-deec345ba5ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.378428] env[68279]: DEBUG nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Received event network-vif-deleted-971e9f68-2eb2-418a-92ac-ab9f6e6b6859 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.378820] env[68279]: DEBUG nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Received event network-vif-plugged-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.379163] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Acquiring lock "0b85c3a6-f413-49b1-9936-222117368995-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.379497] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Lock "0b85c3a6-f413-49b1-9936-222117368995-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.379769] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Lock "0b85c3a6-f413-49b1-9936-222117368995-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.380109] env[68279]: DEBUG nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] No waiting events found dispatching 
network-vif-plugged-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.383308] env[68279]: WARNING nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Received unexpected event network-vif-plugged-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 for instance with vm_state building and task_state spawning. [ 1043.383308] env[68279]: DEBUG nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Received event network-changed-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.383308] env[68279]: DEBUG nova.compute.manager [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Refreshing instance network info cache due to event network-changed-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.383308] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Acquiring lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.392205] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1043.392205] env[68279]: value = "task-2963649" [ 1043.392205] env[68279]: _type = "Task" [ 1043.392205] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.410762] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963649, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.479060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.511572] env[68279]: DEBUG nova.scheduler.client.report [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.646494] env[68279]: DEBUG nova.compute.manager [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1043.646907] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.648633] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9b0b3f-49d5-421a-9e26-ad7cb88e2cf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.662539] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.664268] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf2e1a22-da21-4498-a8e8-a06dfb972800 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.672764] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 1043.672764] env[68279]: value = "task-2963650" [ 1043.672764] env[68279]: _type = "Task" [ 1043.672764] env[68279]: } to complete. 
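(Illustrative aside.) The report-client entry above restates the provider inventory Placement already holds: 48 VCPU at a 4.0 allocation ratio, 196590 MB of RAM with 512 MB reserved, and 400 GB of disk. Assuming the standard Placement capacity rule, capacity = (total - reserved) * allocation_ratio, the schedulable totals work out as in this small worked example:

```python
# Worked example: effective capacity Placement derives from the inventory
# logged above, using capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```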
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.685513] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.697975] env[68279]: DEBUG oslo_vmware.api [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963648, 'name': PowerOnVM_Task, 'duration_secs': 1.2077} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.698276] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.698516] env[68279]: DEBUG nova.compute.manager [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.699377] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb633819-e703-4361-93f4-ed9d7a10ac86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.795888] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task} progress is 83%. 
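(Illustrative aside.) Each PowerOnVM_Task in this trace follows the same invoke-then-poll rhythm: the driver starts a vSphere task, and oslo.vmware's wait_for_task loop logs "progress is N%" until it completes, as task-2963648 just did above. A rough sketch of that call pattern is below; the session endpoint and credentials are hypothetical, and this is not Nova's vm_util code.

```python
# Rough sketch (not Nova's code) of the invoke-then-poll pattern behind the
# PowerOnVM_Task entries above. Assumes a connected
# oslo_vmware.api.VMwareAPISession and a VM managed-object reference.
from oslo_vmware import api  # pip install oslo.vmware


def power_on(session: api.VMwareAPISession, vm_ref):
    # Kick off the vSphere task on the server side ...
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # ... then block while oslo.vmware polls it; the poll loop is what emits
    # the "Task: {...} progress is N%" DEBUG lines seen above.
    return session.wait_for_task(task)

# Usage (hypothetical endpoint and credentials):
#   session = api.VMwareAPISession("vc.example.test", "user", "secret",
#                                  api_retry_count=10, task_poll_interval=0.5)
#   power_on(session, vm_ref)
```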
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.821574] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.822650] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Instance network_info: |[{"id": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "address": "fa:16:3e:8b:e3:25", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629e5271-6f", "ovs_interfaceid": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1043.823114] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Acquired lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.823418] env[68279]: DEBUG nova.network.neutron [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Refreshing network info cache for port 629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.825763] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:e3:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee617cec-01ea-4a11-ac04-ef9767f4c86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '629e5271-6f84-4de3-ac7a-5ca46f3cdfc0', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.836528] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.837759] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.838016] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bc8fa23-e34e-4d92-931e-2908641b4346 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.864150] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.864150] env[68279]: value = "task-2963651" [ 1043.864150] env[68279]: _type = "Task" [ 1043.864150] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.878197] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963651, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.907628] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.976141] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1043.989349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.989349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.989349] env[68279]: DEBUG nova.compute.manager [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.989567] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941db19b-c3b8-4a1e-9ab6-1da403b41008 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.002950] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1044.003238] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.003395] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1044.003578] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor pref 0:0:0 {{(pid=68279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.003722] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1044.003865] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1044.004090] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1044.004258] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1044.004422] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1044.004585] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1044.004758] env[68279]: DEBUG nova.virt.hardware [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1044.005994] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a0ff35-fdaf-4294-ba1d-f726e4da0482 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.010272] env[68279]: DEBUG nova.compute.manager [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1044.010840] env[68279]: DEBUG nova.objects.instance [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'flavor' on Instance uuid 
f38a489d-ddcb-4a66-bb60-058d46ed69db {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.018166] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.021538] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.911s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.021538] env[68279]: DEBUG nova.objects.instance [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lazy-loading 'resources' on Instance uuid a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.023150] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7503dd6d-e14b-4988-9d62-3d10cc00c8d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.045798] env[68279]: INFO nova.scheduler.client.report [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Deleted allocations for instance e1b12b1c-5755-41eb-b550-88c573a09877 [ 1044.184206] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.221915] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.289838] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963647, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.400589} completed successfully. 
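(Illustrative aside.) The nova.virt.hardware entries a few lines up negotiate the guest CPU topology for the 1-vCPU m1.nano flavor: preferred 0:0:0, per-dimension maxima of 65536, and a single surviving topology of 1 socket x 1 core x 1 thread. A toy re-derivation of that result, a simplification of the real algorithm rather than the actual implementation:

```python
# Toy re-derivation of the topology result logged above: for 1 vCPU with
# per-dimension maxima of 65536, the only valid topology is 1x1x1. This is a
# simplification of nova.virt.hardware, not the real code.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```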
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.290273] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f/c23360f2-77b9-4b89-9c6d-1aafb5e74c2f.vmdk to [datastore2] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.291145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f391e60-dc73-4a66-895a-1bd2e2d6a9ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.315457] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.315783] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7a8ea18-9397-41ad-8932-ee5f3cf8a671 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.342290] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1044.342290] env[68279]: value = "task-2963652" [ 1044.342290] env[68279]: _type = "Task" [ 1044.342290] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.354282] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963652, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.380595] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963651, 'name': CreateVM_Task, 'duration_secs': 0.488506} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.380783] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.381531] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.381690] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.382033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1044.382429] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa9426e3-9637-4bfe-9c9a-cdb1f914f4fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.389509] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1044.389509] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de984c-ce89-3fb7-2b32-6cdca4f4dd58" [ 1044.389509] env[68279]: _type = "Task" [ 1044.389509] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.404236] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de984c-ce89-3fb7-2b32-6cdca4f4dd58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.407640] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963649, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.556131] env[68279]: DEBUG oslo_concurrency.lockutils [None req-89922242-77d9-4d0a-af20-4463cad71307 tempest-ServerShowV257Test-192360750 tempest-ServerShowV257Test-192360750-project-member] Lock "e1b12b1c-5755-41eb-b550-88c573a09877" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.855s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.641167] env[68279]: DEBUG nova.network.neutron [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Updated VIF entry in instance network info cache for port 629e5271-6f84-4de3-ac7a-5ca46f3cdfc0. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.641167] env[68279]: DEBUG nova.network.neutron [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Updating instance_info_cache with network_info: [{"id": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "address": "fa:16:3e:8b:e3:25", "network": {"id": "9f7f43c3-0e11-4aa5-8e87-08354d0bab94", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1099118862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cec84ec5eaf740cab9a1c56bfb9d6244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629e5271-6f", "ovs_interfaceid": "629e5271-6f84-4de3-ac7a-5ca46f3cdfc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.688445] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963650, 'name': PowerOffVM_Task, 'duration_secs': 0.703383} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.688687] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.689034] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.692076] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-136d7257-a012-4094-a04b-e95072708619 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.725901] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Successfully updated port: 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.790377] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.790672] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.791735] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleting the datastore file [datastore2] 7d15a05a-f827-40a7-b182-5d2b553481c7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.791735] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1573384d-8405-464f-96f1-f3c22cd33bba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.800023] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for the task: (returnval){ [ 1044.800023] env[68279]: value = "task-2963654" [ 1044.800023] env[68279]: _type = "Task" [ 1044.800023] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.810599] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.852457] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963652, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.911341] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52de984c-ce89-3fb7-2b32-6cdca4f4dd58, 'name': SearchDatastore_Task, 'duration_secs': 0.02484} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.920642] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.920642] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.920804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.920920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.921168] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.921534] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 
tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963649, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.316296} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.926498] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4f1de27-5583-44e0-bcca-a8c5e67eea0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.930267] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 594af7a0-1d0a-43ca-947a-8c5614a289d9/594af7a0-1d0a-43ca-947a-8c5614a289d9.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.930790] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1044.932479] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8be1e331-8831-416a-a19a-68c5ea502f9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.942896] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1044.942896] env[68279]: value = "task-2963655" [ 1044.942896] env[68279]: _type = "Task" [ 1044.942896] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.952942] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.953294] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.955057] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36496d75-806c-4104-9d56-c851101348d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.962055] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f560684-9ffd-4e3e-b4ce-a704864b4745 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.967402] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.973303] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fea377-8d7c-4751-8570-57b6c7e49190 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.977776] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1044.977776] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5227e8e6-3b9d-bad6-4892-c83a0627f0b6" [ 1044.977776] env[68279]: _type = "Task" [ 1044.977776] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.010566] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f7328d-6b34-4631-bc8d-cd308799da14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.017160] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5227e8e6-3b9d-bad6-4892-c83a0627f0b6, 'name': SearchDatastore_Task, 'duration_secs': 0.014887} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.018344] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bddcb37a-8d33-4e8a-b2a7-b468c21f5577 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.024598] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.025274] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-264aaa4d-aefa-46bc-ad94-285a6eed017f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.027980] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f76beb6-9127-4aba-8731-a0ca35242f00 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.033848] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1045.033848] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c07460-f67d-bf6e-92a0-88328dee55d2" [ 1045.033848] env[68279]: _type = "Task" [ 1045.033848] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.047109] env[68279]: DEBUG nova.compute.provider_tree [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.050404] env[68279]: DEBUG oslo_vmware.api [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1045.050404] env[68279]: value = "task-2963656" [ 1045.050404] env[68279]: _type = "Task" [ 1045.050404] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.057596] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c07460-f67d-bf6e-92a0-88328dee55d2, 'name': SearchDatastore_Task, 'duration_secs': 0.022193} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.058345] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.058676] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0b85c3a6-f413-49b1-9936-222117368995/0b85c3a6-f413-49b1-9936-222117368995.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.058943] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19472d66-b81e-457f-9c58-d009efe472a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.064438] env[68279]: DEBUG oslo_vmware.api [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.070371] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1045.070371] env[68279]: value = "task-2963657" [ 1045.070371] env[68279]: _type = "Task" [ 1045.070371] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.083203] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963657, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.146086] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eee3dfe-5833-4142-82e9-61b72d7a8362 req-62c545b0-d059-4d5b-9179-290158181ce7 service nova] Releasing lock "refresh_cache-0b85c3a6-f413-49b1-9936-222117368995" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.234981] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.238167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.238167] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.313928] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.357039] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963652, 'name': ReconfigVM_Task, 'duration_secs': 0.973137} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.357039] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Reconfigured VM instance instance-0000003a to attach disk [datastore2] daccaa30-1011-4c7d-a668-05f9329ab4d5/daccaa30-1011-4c7d-a668-05f9329ab4d5.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.357623] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba6f4dc8-81e3-4266-ac2e-346b93f5a91e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.365610] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1045.365610] env[68279]: value = "task-2963658" [ 1045.365610] env[68279]: _type = "Task" [ 1045.365610] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.377318] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963658, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.416918] env[68279]: DEBUG nova.compute.manager [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.417256] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.417394] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.417608] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.417810] env[68279]: DEBUG nova.compute.manager [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] No waiting events found dispatching network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.418010] env[68279]: WARNING nova.compute.manager [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received unexpected event network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 for instance with vm_state building and task_state spawning. [ 1045.418213] env[68279]: DEBUG nova.compute.manager [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-changed-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.418385] env[68279]: DEBUG nova.compute.manager [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing instance network info cache due to event network-changed-343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1045.418572] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.426647] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "f4963730-d516-48b7-a320-8af731831a30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.426918] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.427210] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "f4963730-d516-48b7-a320-8af731831a30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.427454] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.427633] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.429863] env[68279]: INFO nova.compute.manager [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Terminating instance [ 1045.455981] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077148} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.456331] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1045.457145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca58f9f-5785-46cd-ba86-9f12e45fb66d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.482680] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 594af7a0-1d0a-43ca-947a-8c5614a289d9/594af7a0-1d0a-43ca-947a-8c5614a289d9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.483440] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78319090-0114-40e9-a109-a18c92158023 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.507799] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1045.507799] env[68279]: value = "task-2963659" [ 1045.507799] env[68279]: _type = "Task" [ 1045.507799] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.521299] env[68279]: DEBUG oslo_concurrency.lockutils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.521540] env[68279]: DEBUG oslo_concurrency.lockutils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.523565] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963659, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.552500] env[68279]: DEBUG nova.scheduler.client.report [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.571446] env[68279]: DEBUG oslo_vmware.api [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963656, 'name': PowerOffVM_Task, 'duration_secs': 0.261933} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.575567] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.575803] env[68279]: DEBUG nova.compute.manager [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.576919] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b26d0f-fc4e-49c0-b47b-b9db0357c444 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.588779] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963657, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.774198] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.811676] env[68279]: DEBUG oslo_vmware.api [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Task: {'id': task-2963654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.546564} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.811862] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.812129] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.812365] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.812569] env[68279]: INFO nova.compute.manager [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1045.812833] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.813067] env[68279]: DEBUG nova.compute.manager [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.813184] env[68279]: DEBUG nova.network.neutron [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.875968] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963658, 'name': Rename_Task, 'duration_secs': 0.241507} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.878579] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.878856] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7887301-c4b9-48c8-91d4-3553788cc8a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.885869] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1045.885869] env[68279]: value = "task-2963660" [ 1045.885869] env[68279]: _type = "Task" [ 1045.885869] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.895148] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.916617] env[68279]: DEBUG nova.network.neutron [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.933997] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "refresh_cache-f4963730-d516-48b7-a320-8af731831a30" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.934252] 
env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquired lock "refresh_cache-f4963730-d516-48b7-a320-8af731831a30" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.934449] env[68279]: DEBUG nova.network.neutron [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.018140] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963659, 'name': ReconfigVM_Task, 'duration_secs': 0.338154} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.018504] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 594af7a0-1d0a-43ca-947a-8c5614a289d9/594af7a0-1d0a-43ca-947a-8c5614a289d9.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1046.019144] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b8c9bfc-a389-4b9c-9d01-2e09cbe4942e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.025029] env[68279]: DEBUG nova.compute.utils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1046.027758] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1046.027758] env[68279]: value = "task-2963661" [ 1046.027758] env[68279]: _type = "Task" [ 1046.027758] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.044859] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963661, 'name': Rename_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.058370] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.061709] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.988s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.061990] env[68279]: DEBUG nova.objects.instance [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lazy-loading 'resources' on Instance uuid 0731fdf9-f90c-46a4-9165-f6d91767b51b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.082969] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701005} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.083266] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 0b85c3a6-f413-49b1-9936-222117368995/0b85c3a6-f413-49b1-9936-222117368995.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.083500] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.083763] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-060e272d-9efb-4931-b4a2-afa5a8aebcd4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.089454] env[68279]: INFO nova.scheduler.client.report [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Deleted allocations for instance a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e [ 1046.091937] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1046.091937] env[68279]: value = "task-2963662" [ 1046.091937] env[68279]: _type = "Task" [ 1046.091937] env[68279]: } to 
complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.103279] env[68279]: DEBUG oslo_concurrency.lockutils [None req-19a249d6-16f9-41b9-8cf1-de4601b184cd tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.115s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.115031] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963662, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.396931] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963660, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.419670] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.420063] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance network_info: |[{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1046.420386] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.420566] env[68279]: DEBUG nova.network.neutron [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.421853] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:8f:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '343369ce-f2d1-401a-9a78-b72854001a75', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.430274] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1046.431490] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.431818] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df617b08-8ecf-48dc-8daf-21f11193d708 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.460436] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.460436] env[68279]: value = "task-2963663" [ 1046.460436] env[68279]: _type = "Task" [ 1046.460436] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.470495] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963663, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.471548] env[68279]: DEBUG nova.network.neutron [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1046.529644] env[68279]: DEBUG oslo_concurrency.lockutils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.534813] env[68279]: DEBUG nova.network.neutron [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.543533] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963661, 'name': Rename_Task, 'duration_secs': 0.15669} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.543971] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.544845] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e660591-58ef-45ca-a6ed-2e0f012bdaf3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.553110] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1046.553110] env[68279]: value = "task-2963664" [ 1046.553110] env[68279]: _type = "Task" [ 1046.553110] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.561948] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963664, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.603504] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f61144c3-8365-4874-b378-8fdc31f4de56 tempest-ListServerFiltersTestJSON-2130527500 tempest-ListServerFiltersTestJSON-2130527500-project-member] Lock "a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.938s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.614445] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963662, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102302} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.619951] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.619951] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb4eda5-dbcb-436c-ab15-79f7748d3b8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.646141] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 0b85c3a6-f413-49b1-9936-222117368995/0b85c3a6-f413-49b1-9936-222117368995.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.651113] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c631354-b63f-4da2-9139-ad84fc8c692c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.669181] env[68279]: DEBUG nova.network.neutron [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.677714] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1046.677714] env[68279]: value = "task-2963665" [ 1046.677714] env[68279]: _type = "Task" [ 1046.677714] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.694772] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963665, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.905613] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963660, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.975470] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963663, 'name': CreateVM_Task, 'duration_secs': 0.421389} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.978723] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.979611] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.979712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.980676] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.980676] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4b88b9d-b506-4d44-a9ff-8a1a20ea4ec1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.986065] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1046.986065] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5241ff38-6bb4-e5b0-599e-cf84f8e40349" [ 1046.986065] env[68279]: _type = "Task" [ 1046.986065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.996035] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5241ff38-6bb4-e5b0-599e-cf84f8e40349, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.022287] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb6e6d6-8fb0-4ef4-a173-f24e9378be38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.032889] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5709065-5b6c-4df7-acef-fbfc7f812341 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.039515] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Releasing lock "refresh_cache-f4963730-d516-48b7-a320-8af731831a30" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.040025] env[68279]: DEBUG nova.compute.manager [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1047.040753] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1047.041807] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c05ac2e-c1c4-468d-b42c-dfca74d36bc8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.076571] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f34f1ef-93f4-4bc0-af21-7276ae8f4561 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.086175] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.086979] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f605ec4b-ffcb-49e4-a182-fc298bd45d73 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.097030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd30163-2ab0-4119-8bea-d2172000f958 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.105835] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963664, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.115283] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1047.115283] env[68279]: value = "task-2963666" [ 1047.115283] env[68279]: _type = "Task" [ 1047.115283] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.135616] env[68279]: DEBUG nova.compute.provider_tree [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.143431] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.171682] env[68279]: INFO nova.compute.manager [-] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Took 1.36 seconds to deallocate network for instance. [ 1047.191248] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963665, 'name': ReconfigVM_Task, 'duration_secs': 0.417738} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.191538] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 0b85c3a6-f413-49b1-9936-222117368995/0b85c3a6-f413-49b1-9936-222117368995.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.192583] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb8c3cdc-1952-47aa-8eb7-d99e9a330ac6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.200624] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1047.200624] env[68279]: value = "task-2963667" [ 1047.200624] env[68279]: _type = "Task" [ 1047.200624] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.210034] env[68279]: DEBUG nova.compute.manager [req-23615a34-090e-4d26-8678-2cc4df9d5cdb req-0e0a9d32-93df-4001-9fe2-8c1ba4246536 service nova] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Received event network-vif-deleted-20f8cd48-6520-4f63-866e-b8e360f8b818 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1047.210786] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963667, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.323067] env[68279]: DEBUG nova.network.neutron [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updated VIF entry in instance network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.323508] env[68279]: DEBUG nova.network.neutron [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.397769] env[68279]: DEBUG oslo_vmware.api [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963660, 'name': PowerOnVM_Task, 'duration_secs': 1.121129} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.398108] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.498653] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5241ff38-6bb4-e5b0-599e-cf84f8e40349, 'name': SearchDatastore_Task, 'duration_secs': 0.020607} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.498653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.498846] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.499068] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.499222] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.499428] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.499716] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8873bc-4cfe-48d4-958e-1372552f537e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.509727] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.509917] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.510853] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4096c5b-eb67-4002-809e-3bdab91bf530 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.516159] env[68279]: DEBUG nova.compute.manager [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.516949] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3287134c-51e4-41f9-add4-46d64e32f3c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.521554] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1047.521554] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52804c9a-e3a8-9a73-9e7b-e3a57235a6a4" [ 1047.521554] env[68279]: _type = "Task" [ 1047.521554] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.539009] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52804c9a-e3a8-9a73-9e7b-e3a57235a6a4, 'name': SearchDatastore_Task, 'duration_secs': 0.010788} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.539009] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0de7065-5f66-4585-9442-df73abe368f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.544911] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1047.544911] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527f39bd-da4d-2a7b-1ce3-104541133433" [ 1047.544911] env[68279]: _type = "Task" [ 1047.544911] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.554475] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527f39bd-da4d-2a7b-1ce3-104541133433, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.584698] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963664, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.596791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.597060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.597266] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.597448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.597626] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.599803] env[68279]: INFO nova.compute.manager [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Terminating instance [ 1047.615729] env[68279]: DEBUG oslo_concurrency.lockutils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.616544] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.616544] env[68279]: INFO nova.compute.manager [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Attaching volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 to /dev/sdb [ 1047.631717] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963666, 'name': PowerOffVM_Task, 'duration_secs': 0.278027} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.631993] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.632186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1047.632728] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-898760ad-ad55-440c-97d8-5a54753afd98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.638612] env[68279]: DEBUG nova.scheduler.client.report [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.661642] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423e3dec-77da-4a66-a088-1cd911105ff3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.665291] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1047.665802] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None 
req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1047.665802] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Deleting the datastore file [datastore2] f4963730-d516-48b7-a320-8af731831a30 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1047.666420] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17b5dc7c-04e4-4b59-b3d2-aaec6f1a6eee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.670413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1095ce71-174b-4a4b-8a5e-2737854aa583 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.674512] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for the task: (returnval){ [ 1047.674512] env[68279]: value = "task-2963669" [ 1047.674512] env[68279]: _type = "Task" [ 1047.674512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.678435] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.684093] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963669, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.687877] env[68279]: DEBUG nova.virt.block_device [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating existing volume attachment record: aa849a80-dac4-4336-8d43-36a386b68f74 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1047.713252] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963667, 'name': Rename_Task, 'duration_secs': 0.172476} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.713894] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.713894] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fdfde52-dc65-4f84-8b2d-bb81e4b138bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.721630] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1047.721630] env[68279]: value = "task-2963670" [ 1047.721630] env[68279]: _type = "Task" [ 1047.721630] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.731138] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.827497] env[68279]: DEBUG oslo_concurrency.lockutils [req-37fd678a-282f-4613-8a8c-e4eb9899b5c3 req-b091b292-abad-4f9b-8c43-941e4e7faaad service nova] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.039500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-45f32671-c9a8-47f9-9598-f55edf63ba36 tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 32.302s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.056031] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527f39bd-da4d-2a7b-1ce3-104541133433, 'name': SearchDatastore_Task, 'duration_secs': 0.010959} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.056031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.056311] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1048.056671] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2cfe9da-9b79-49e5-a0c3-61dccffa5b4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.066981] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1048.066981] env[68279]: value = "task-2963672" [ 1048.066981] env[68279]: _type = "Task" [ 1048.066981] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.085159] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.087351] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963664, 'name': PowerOnVM_Task, 'duration_secs': 1.057051} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.087608] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.087877] env[68279]: INFO nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Took 9.09 seconds to spawn the instance on the hypervisor. 
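The records above follow a single pattern: a *_Task method is invoked (ReconfigVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...), the driver logs "Waiting for the task ... to complete", oslo.vmware's _poll_task emits "progress is N%" lines, and the task finally reports "completed successfully" with a duration_secs. The following is a minimal illustrative sketch of that wait loop, not the oslo.vmware source; vcenter_session and its get_task_info() helper are assumed stand-ins for the real SOAP plumbing, and the printed messages only approximate the log lines above.

import time


def wait_for_task(vcenter_session, task_ref, poll_interval=0.5):
    """Poll a vCenter task reference until it reaches a terminal state."""
    while True:
        info = vcenter_session.get_task_info(task_ref)  # assumed stand-in helper
        if info.state in ('queued', 'running'):
            # Corresponds to the "Task: {...} progress is N%" DEBUG records.
            print("Task: %s progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            # Corresponds to "... completed successfully." with duration_secs.
            print("Task: %s completed successfully." % info.key)
            return info.result
        # Terminal 'error' state: surface the fault carried in the task info.
        raise RuntimeError("Task %s failed: %s" % (info.key, info.error))

In the real driver the polling interval, retry behaviour, and error translation are handled inside oslo_vmware.api.VMwareAPISession.wait_for_task; the sketch only shows the state machine implied by the log.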
[ 1048.088205] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.089106] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a487ae-85d3-4535-8791-67c640c67701 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.104811] env[68279]: DEBUG nova.compute.manager [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.105194] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.106143] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f72357-12f2-414a-948f-14fd3d4d3f90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.115659] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.116163] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08880ac5-109f-46a4-acb1-89b1c3eb1857 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.145049] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.147688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.051s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.148059] env[68279]: DEBUG nova.objects.instance [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lazy-loading 'resources' on Instance uuid e6f39528-384c-456b-8155-a6856bab3ce0 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.181590] env[68279]: INFO nova.scheduler.client.report [None 
req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted allocations for instance 0731fdf9-f90c-46a4-9165-f6d91767b51b [ 1048.194058] env[68279]: DEBUG oslo_vmware.api [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Task: {'id': task-2963669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164746} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.195694] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1048.195884] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1048.196208] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1048.196490] env[68279]: INFO nova.compute.manager [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1048.196827] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1048.196960] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.197162] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.197363] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore2] f38a489d-ddcb-4a66-bb60-058d46ed69db {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.197641] env[68279]: DEBUG nova.compute.manager [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1048.197785] env[68279]: DEBUG nova.network.neutron [-] [instance: f4963730-d516-48b7-a320-8af731831a30] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1048.199704] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1d23446-6d80-4a78-8e00-4be679475e6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.214077] env[68279]: DEBUG oslo_vmware.api [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1048.214077] env[68279]: value = "task-2963676" [ 1048.214077] env[68279]: _type = "Task" [ 1048.214077] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.219552] env[68279]: DEBUG nova.network.neutron [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.224543] env[68279]: DEBUG oslo_vmware.api [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963676, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.235089] env[68279]: DEBUG oslo_vmware.api [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963670, 'name': PowerOnVM_Task, 'duration_secs': 0.50066} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.235394] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.235607] env[68279]: INFO nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Took 6.77 seconds to spawn the instance on the hypervisor. [ 1048.235804] env[68279]: DEBUG nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.236640] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd6bfec-5fb3-4701-bd63-22e84d16c910 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.580852] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963672, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.612749] env[68279]: INFO nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Took 20.05 seconds to build instance. [ 1048.692332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bce8a4d8-219c-4cf8-9900-9e56ee676b12 tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "0731fdf9-f90c-46a4-9165-f6d91767b51b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.414s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.726116] env[68279]: DEBUG nova.network.neutron [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.727369] env[68279]: DEBUG oslo_vmware.api [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963676, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256478} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.730433] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1048.730625] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1048.730804] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1048.730971] env[68279]: INFO nova.compute.manager [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1048.731257] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1048.731642] env[68279]: DEBUG nova.compute.manager [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1048.731754] env[68279]: DEBUG nova.network.neutron [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1048.758434] env[68279]: INFO nova.compute.manager [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Took 20.16 seconds to build instance. 
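The interleaved "Acquiring lock ... by ...", "acquired ... waited 0.000s", and "released ... held 21.671s" records come from oslo.concurrency's lockutils wrapper around per-instance operations: build, terminate, and unshelve are each serialized on the instance UUID, so the "held" time is roughly the duration of the whole operation. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the names terminate_instance / do_terminate_instance mirror the names in the log, but the body here is a hypothetical placeholder rather than the actual nova.compute.manager code.

import time

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid):
    # The real compute manager wraps the work in an inner function that is
    # serialized on the instance UUID; that wrapper is what emits the
    # "Acquiring lock" / "acquired ... waited" / "released ... held" lines.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # Placeholder for the power-off / unregister / datastore-file-delete
        # flow seen above; while it runs, other operations on the same UUID
        # block, which produces the multi-second "held" durations in the log.
        time.sleep(0.1)

    do_terminate_instance()

Called as terminate_instance("f38a489d-ddcb-4a66-bb60-058d46ed69db") (UUID taken from the records above), concurrent callers for the same UUID queue behind the lock, which is exactly the waited/held accounting the log reports.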
[ 1048.823489] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.823768] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.824485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.824485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.824485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.826869] env[68279]: INFO nova.compute.manager [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Terminating instance [ 1048.965898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639c9371-107a-4956-b478-662f3521569f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.975247] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19348dfd-a54f-4528-8af8-4a8e9497562b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.011300] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a21ee6-e3b4-44ba-b39a-43386fc58ed6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.018747] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274119cc-d37c-4cfc-8742-062b961b2551 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.037348] env[68279]: DEBUG nova.compute.provider_tree [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.080875] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585611} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.081306] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.081597] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.081900] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea6ace4d-44c2-4feb-bdc3-a03fd1e0c607 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.091526] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1049.091526] env[68279]: value = "task-2963677" [ 1049.091526] env[68279]: _type = "Task" [ 1049.091526] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.102731] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963677, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.116293] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.571s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.229089] env[68279]: INFO nova.compute.manager [-] [instance: f4963730-d516-48b7-a320-8af731831a30] Took 1.03 seconds to deallocate network for instance. [ 1049.261039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-71acf069-e004-401a-90c0-79e9879c7b09 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.671s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.308094] env[68279]: DEBUG nova.compute.manager [req-cc0deb5c-dcae-42ff-be18-8ccd8707e236 req-08c48ad9-fef1-4298-8bfb-50a411b313a1 service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Received event network-vif-deleted-f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1049.308094] env[68279]: INFO nova.compute.manager [req-cc0deb5c-dcae-42ff-be18-8ccd8707e236 req-08c48ad9-fef1-4298-8bfb-50a411b313a1 service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Neutron deleted interface f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7; detaching it from the instance and deleting it from the info cache [ 1049.308216] env[68279]: DEBUG nova.network.neutron [req-cc0deb5c-dcae-42ff-be18-8ccd8707e236 req-08c48ad9-fef1-4298-8bfb-50a411b313a1 service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.333803] env[68279]: DEBUG nova.compute.manager [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.334049] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.335532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95af54c6-25af-403c-8dda-744618a40987 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.344205] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.344465] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3ccdbb0-00e4-4483-bcfd-1da7c9cafd20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.352514] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1049.352514] env[68279]: value = "task-2963678" [ 1049.352514] env[68279]: _type = "Task" [ 1049.352514] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.362433] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963678, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.541129] env[68279]: DEBUG nova.scheduler.client.report [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.603899] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963677, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069533} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.605388] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.605388] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eb93a6-2d05-4905-9258-b27b18e9c293 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.629897] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.630620] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3992ed18-daea-4fc1-b7e0-88d0fb5dde48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.658136] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1049.658136] env[68279]: value = "task-2963679" [ 1049.658136] env[68279]: _type = "Task" [ 1049.658136] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.668645] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.679028] env[68279]: DEBUG nova.network.neutron [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.735954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.810823] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8cc4fd3-7739-43e5-a198-8dccfe4df245 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.821256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3514dd21-5a9a-47b7-938e-44d67a867f95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.833172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.833172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.833275] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.833434] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.833601] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.835830] env[68279]: INFO nova.compute.manager [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Terminating instance [ 1049.863045] env[68279]: DEBUG nova.compute.manager [req-cc0deb5c-dcae-42ff-be18-8ccd8707e236 req-08c48ad9-fef1-4298-8bfb-50a411b313a1 service nova] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Detach interface failed, port_id=f603cc38-e12d-4dfe-9cf8-a4bbbaa5f7e7, reason: Instance f38a489d-ddcb-4a66-bb60-058d46ed69db could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1049.872613] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963678, 'name': PowerOffVM_Task, 'duration_secs': 0.292683} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.872890] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.873086] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.873350] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b46b5177-b5a7-4abd-97bb-514a3a7b6b88 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.902261] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "0b85c3a6-f413-49b1-9936-222117368995" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.903298] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.903298] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "0b85c3a6-f413-49b1-9936-222117368995-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.903298] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.903298] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.905269] env[68279]: INFO nova.compute.manager [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Terminating instance [ 1049.941416] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.941798] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.942136] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Deleting the datastore file [datastore1] efda54fe-09a3-4653-b16a-8b3cdd4849c5 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.942521] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4e1aac7-b209-4366-be44-d1b4fe037aa2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.951097] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for the task: (returnval){ [ 1049.951097] env[68279]: value = "task-2963681" [ 1049.951097] env[68279]: _type = "Task" [ 1049.951097] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.960193] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.046430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.050258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.444s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.050825] env[68279]: DEBUG nova.objects.instance [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'resources' on Instance uuid c62a0d0e-8869-482a-a687-c628b96d6e22 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.078071] env[68279]: INFO nova.scheduler.client.report [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Deleted allocations for instance e6f39528-384c-456b-8155-a6856bab3ce0 [ 1050.169250] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963679, 'name': ReconfigVM_Task, 'duration_secs': 0.436049} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.169569] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Reconfigured VM instance instance-00000062 to attach disk [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.170746] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71eff4bd-9274-4d8c-8535-53a6e64bb31d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.178922] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1050.178922] env[68279]: value = "task-2963682" [ 1050.178922] env[68279]: _type = "Task" [ 1050.178922] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.182527] env[68279]: INFO nova.compute.manager [-] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Took 1.45 seconds to deallocate network for instance. 
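The "Acquiring lock …", "Lock … acquired … waited Ns" and "Lock … "released" … held Ns" triplets that recur throughout this trace (including the compute_resources lock just above, waited 15.444s / held 1.899s) are emitted by oslo.concurrency's lockutils wrapper; the {{… inner …/oslo_concurrency/lockutils.py}} suffix points at its inner decorator. A minimal sketch of the pattern that produces these messages, with an illustrative lock name and function rather than Nova's actual code:

    # Sketch only: lock name and function are illustrative, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs while holding the named in-process lock; the decorator's
        # inner wrapper logs "Acquiring lock", "acquired ... waited" and
        # "released ... held" around this call.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass

The waited/held figures in the log are simply the time spent blocking on, and then holding, that named lock.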
[ 1050.190898] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963682, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.206733] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aaed06-6782-4794-95dd-03ea67872690 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.215863] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Suspending the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1050.215863] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d00dbfc0-09b1-4a83-94ac-0e74ee75bec1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.222226] env[68279]: DEBUG oslo_vmware.api [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1050.222226] env[68279]: value = "task-2963683" [ 1050.222226] env[68279]: _type = "Task" [ 1050.222226] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.232479] env[68279]: DEBUG oslo_vmware.api [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963683, 'name': SuspendVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.340299] env[68279]: DEBUG nova.compute.manager [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.340570] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.341639] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b988ee30-fe3e-45a8-8c15-963348410bb9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.351267] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.351556] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0515ee1-6f58-46c5-84b5-f0f30f17adcf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.360512] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1050.360512] env[68279]: value = "task-2963685" [ 1050.360512] env[68279]: _type = "Task" [ 1050.360512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.371346] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.410696] env[68279]: DEBUG nova.compute.manager [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.412079] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.412724] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353298ce-6d72-4cae-9889-9e0175b3e85c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.441764] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.442132] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21b1ab60-c79f-43f0-a878-7c1a3698d6d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.451992] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1050.451992] env[68279]: value = "task-2963686" [ 1050.451992] env[68279]: _type = "Task" [ 1050.451992] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.466627] env[68279]: DEBUG oslo_vmware.api [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Task: {'id': task-2963681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252425} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.470509] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.470744] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.470958] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.471242] env[68279]: INFO nova.compute.manager [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Took 1.14 seconds to destroy the instance on the hypervisor. 
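Each "Invoking VirtualMachine.PowerOffVM_Task …" / "Waiting for the task: (returnval){ value = "task-…" }" / "progress is N%" / "completed successfully" sequence above is one oslo.vmware request/poll cycle: the driver starts a vSphere task through the API session and then blocks while the session polls it to completion. A rough sketch of that pattern, assuming a reachable vCenter; the host, credentials and vm_ref below are placeholders, not values from this deployment:

    # Sketch of the start-task-then-poll pattern used by the VMware driver.
    # Host, credentials and vm_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vcenter.example.org',
        server_username='user',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # Starts the vSphere task, then blocks while the session polls it,
        # logging "... progress is N%" until the task reports success.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)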
[ 1050.471529] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.471744] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.471988] env[68279]: DEBUG nova.compute.manager [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.472092] env[68279]: DEBUG nova.network.neutron [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.589266] env[68279]: DEBUG oslo_concurrency.lockutils [None req-07fc437f-880d-4f17-9562-fe587c5c7e4d tempest-ListServersNegativeTestJSON-67697346 tempest-ListServersNegativeTestJSON-67697346-project-member] Lock "e6f39528-384c-456b-8155-a6856bab3ce0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.393s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.694022] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963682, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.694022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.741648] env[68279]: DEBUG oslo_vmware.api [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963683, 'name': SuspendVM_Task} progress is 62%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.873478] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963685, 'name': PowerOffVM_Task, 'duration_secs': 0.250057} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.873478] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.873653] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.873913] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fe1a89b-6371-4b14-8193-367e66c37aff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.897163] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7465d98d-57e8-47dd-a9ec-9f615f962414 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.905609] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2edaba0-088b-4ce0-b13b-9fa0720aacc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.960836] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cba287-366e-44a5-9cd3-4e36882ee610 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.963733] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.963934] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.964164] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleting the datastore file [datastore2] 594af7a0-1d0a-43ca-947a-8c5614a289d9 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.964720] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcbd5475-d116-4ab1-891e-4461da3ac45a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.977811] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.981542] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1050.981542] env[68279]: value = "task-2963688" [ 1050.981542] env[68279]: _type = "Task" [ 1050.981542] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.982942] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e6cf53-b034-4a76-833e-52a27e6fb4dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.003551] env[68279]: DEBUG nova.compute.provider_tree [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.009032] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.193159] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963682, 'name': Rename_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.237291] env[68279]: DEBUG oslo_vmware.api [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963683, 'name': SuspendVM_Task, 'duration_secs': 0.924289} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.238040] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Suspended the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1051.238040] env[68279]: DEBUG nova.compute.manager [None req-68252301-cd36-4b10-9754-f72f457d52fe tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.238697] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a8a084-2b8e-4f13-b07a-853a6690d4ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.341828] env[68279]: DEBUG nova.compute.manager [req-b62eb654-92f4-4b59-a466-9b7bd57d0fb2 req-d24ac2b5-51fc-4ebe-aa97-53b0bf55b4a3 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Received event network-vif-deleted-a36ff15f-dc24-4fe7-aaf1-66caad63a54f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.341828] env[68279]: INFO nova.compute.manager [req-b62eb654-92f4-4b59-a466-9b7bd57d0fb2 req-d24ac2b5-51fc-4ebe-aa97-53b0bf55b4a3 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Neutron deleted interface a36ff15f-dc24-4fe7-aaf1-66caad63a54f; detaching it from the instance and deleting it from the info cache [ 1051.341828] env[68279]: DEBUG nova.network.neutron [req-b62eb654-92f4-4b59-a466-9b7bd57d0fb2 req-d24ac2b5-51fc-4ebe-aa97-53b0bf55b4a3 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.470524] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.502768] env[68279]: DEBUG oslo_vmware.api [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218196} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.502984] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.503227] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.503535] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.503768] env[68279]: INFO nova.compute.manager [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1051.504421] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.504687] env[68279]: DEBUG nova.compute.manager [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.504801] env[68279]: DEBUG nova.network.neutron [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.510762] env[68279]: DEBUG nova.scheduler.client.report [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.693867] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963682, 'name': Rename_Task, 'duration_secs': 1.190708} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.694176] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.694645] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-164c3949-22a4-4f03-a02e-2fc0ccb22675 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.705589] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1051.705589] env[68279]: value = "task-2963689" [ 1051.705589] env[68279]: _type = "Task" [ 1051.705589] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.715960] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.754603] env[68279]: DEBUG nova.network.neutron [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.847231] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0e7972f-0ac6-4520-bbd8-2bb31920f0e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.861508] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ebfcad-b20d-461f-a00c-464ea6d12dcb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.904092] env[68279]: DEBUG nova.compute.manager [req-b62eb654-92f4-4b59-a466-9b7bd57d0fb2 req-d24ac2b5-51fc-4ebe-aa97-53b0bf55b4a3 service nova] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Detach interface failed, port_id=a36ff15f-dc24-4fe7-aaf1-66caad63a54f, reason: Instance efda54fe-09a3-4653-b16a-8b3cdd4849c5 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1051.967216] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963686, 'name': PowerOffVM_Task, 'duration_secs': 1.163895} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.967527] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.967704] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.968245] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ede0313a-e474-4d5a-b8f1-e82f71b2cca3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.016412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.019774] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 17.349s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.048525] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.049074] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.049074] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleting the datastore file [datastore1] 0b85c3a6-f413-49b1-9936-222117368995 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.051491] env[68279]: INFO nova.scheduler.client.report [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted allocations for instance c62a0d0e-8869-482a-a687-c628b96d6e22 [ 1052.053445] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4732a27c-86e3-4968-8e1c-e972cf29a2fc {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.070226] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for the task: (returnval){ [ 1052.070226] env[68279]: value = "task-2963691" [ 1052.070226] env[68279]: _type = "Task" [ 1052.070226] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.081372] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963691, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.222922] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963689, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.252136] env[68279]: DEBUG nova.network.neutron [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.257588] env[68279]: INFO nova.compute.manager [-] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Took 1.79 seconds to deallocate network for instance. [ 1052.526362] env[68279]: INFO nova.compute.claims [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.564701] env[68279]: DEBUG oslo_concurrency.lockutils [None req-01f74111-464a-44b1-a54c-dbe18aa859a0 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "c62a0d0e-8869-482a-a687-c628b96d6e22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.052s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.579591] env[68279]: DEBUG oslo_vmware.api [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Task: {'id': task-2963691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252429} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.580027] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.580355] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.580661] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.580948] env[68279]: INFO nova.compute.manager [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1052.581329] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.581650] env[68279]: DEBUG nova.compute.manager [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.581855] env[68279]: DEBUG nova.network.neutron [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.720846] env[68279]: DEBUG oslo_vmware.api [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963689, 'name': PowerOnVM_Task, 'duration_secs': 0.821575} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.721175] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.721345] env[68279]: INFO nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Took 8.74 seconds to spawn the instance on the hypervisor. 
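The "Inventory has not changed for provider 40ba16cf-…" report lines above carry, per resource class, the total, reserved amount, min/max unit, step size and allocation ratio that the resource tracker syncs to placement; the capacity placement schedules against is (total - reserved) * allocation_ratio. A small worked example using the figures from that inventory (illustrative helper, not Nova code):

    # Effective capacity as placement derives it: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0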
[ 1052.721838] env[68279]: DEBUG nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.722631] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69876ed4-c9da-4200-a72c-5804cdc6c9cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.737828] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Volume attach. Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1052.738072] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1052.738903] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2a3c43-75a1-4845-9ccd-9b06516ead24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.756752] env[68279]: INFO nova.compute.manager [-] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Took 1.25 seconds to deallocate network for instance. 
[ 1052.760645] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1666daab-f62e-44e5-aaa5-7302b2fe3b13 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.767163] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.787093] env[68279]: INFO nova.compute.manager [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Resuming [ 1052.787579] env[68279]: DEBUG nova.objects.instance [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'flavor' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.797123] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.798829] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0723ff4-03b4-4fca-bf69-2988218bbdae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.823531] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1052.823531] env[68279]: value = "task-2963692" [ 1052.823531] env[68279]: _type = "Task" [ 1052.823531] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.839502] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963692, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.034421] env[68279]: INFO nova.compute.resource_tracker [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating resource usage from migration 70f75b68-4652-4b55-a1d2-21a134e0012d [ 1053.244278] env[68279]: INFO nova.compute.manager [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Took 23.48 seconds to build instance. [ 1053.268501] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.301838] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08a0da5-ec3e-4e0d-90ec-f3617fcb65b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.311988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e545be84-ef7f-4e33-9234-8b5172d95e1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.348053] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652de5bb-8e2a-497b-bd0b-d2d358a61653 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.359224] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ef73ab-2c29-4abc-9962-049d4dd92301 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.363118] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963692, 'name': ReconfigVM_Task, 'duration_secs': 0.371721} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.363675] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.368619] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ee748ef-f2d1-4b24-b12f-374037b2d9ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.388957] env[68279]: DEBUG nova.compute.provider_tree [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.394507] env[68279]: DEBUG nova.compute.manager [req-f9d384f0-efd5-45c4-a760-325595b1130d req-43545e04-d5ba-4db0-bbef-458f04c7fc08 service nova] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Received event network-vif-deleted-b0aaf790-98b7-4938-a783-54a408c8f485 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.394507] env[68279]: DEBUG nova.compute.manager [req-f9d384f0-efd5-45c4-a760-325595b1130d req-43545e04-d5ba-4db0-bbef-458f04c7fc08 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Received event network-vif-deleted-629e5271-6f84-4de3-ac7a-5ca46f3cdfc0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.394507] env[68279]: INFO nova.compute.manager [req-f9d384f0-efd5-45c4-a760-325595b1130d req-43545e04-d5ba-4db0-bbef-458f04c7fc08 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Neutron deleted interface 629e5271-6f84-4de3-ac7a-5ca46f3cdfc0; detaching it from the instance and deleting it from the info cache [ 1053.394507] env[68279]: DEBUG nova.network.neutron [req-f9d384f0-efd5-45c4-a760-325595b1130d req-43545e04-d5ba-4db0-bbef-458f04c7fc08 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.399991] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1053.399991] env[68279]: value = "task-2963693" [ 1053.399991] env[68279]: _type = "Task" [ 1053.399991] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.410651] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963693, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.473168] env[68279]: DEBUG nova.network.neutron [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.746952] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8c98374c-5310-48f9-ae36-648b2ec5b5d7 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.995s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.894511] env[68279]: DEBUG nova.scheduler.client.report [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.898516] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0861c2a3-b41b-44b1-aa01-0b28aa1b32ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.916284] env[68279]: DEBUG oslo_vmware.api [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963693, 'name': ReconfigVM_Task, 'duration_secs': 0.172409} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.917859] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1053.923625] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1cab51-9ea2-4b92-887b-227f7ef20c1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.960217] env[68279]: DEBUG nova.compute.manager [req-f9d384f0-efd5-45c4-a760-325595b1130d req-43545e04-d5ba-4db0-bbef-458f04c7fc08 service nova] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Detach interface failed, port_id=629e5271-6f84-4de3-ac7a-5ca46f3cdfc0, reason: Instance 0b85c3a6-f413-49b1-9936-222117368995 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1053.976089] env[68279]: INFO nova.compute.manager [-] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Took 1.39 seconds to deallocate network for instance. [ 1053.982575] env[68279]: DEBUG nova.compute.manager [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-changed-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.982725] env[68279]: DEBUG nova.compute.manager [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing instance network info cache due to event network-changed-343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1053.982935] env[68279]: DEBUG oslo_concurrency.lockutils [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.983352] env[68279]: DEBUG oslo_concurrency.lockutils [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.983352] env[68279]: DEBUG nova.network.neutron [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1054.310326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.310582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquired lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.310790] env[68279]: DEBUG nova.network.neutron [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.399529] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.380s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.399750] env[68279]: INFO nova.compute.manager [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Migrating [ 1054.406496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.726s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.407971] env[68279]: INFO nova.compute.claims [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.485581] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.784561] env[68279]: DEBUG nova.network.neutron [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updated VIF entry in instance network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.786157] env[68279]: DEBUG nova.network.neutron [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.819017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.819272] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.920568] 
env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.920740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.920916] env[68279]: DEBUG nova.network.neutron [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.971034] env[68279]: DEBUG nova.objects.instance [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'flavor' on Instance uuid 50f390b2-99b7-49f3-997f-7d7b50cff9f2 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.034823] env[68279]: DEBUG nova.network.neutron [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [{"id": "a047ea62-0c74-4967-820e-75553a4d8d7c", "address": "fa:16:3e:77:3b:51", "network": {"id": "cb396ea1-c22a-450c-9b6e-1cfd64943fbf", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1515979186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d27076ab7348bb9ca331f4ff68e46f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa047ea62-0c", "ovs_interfaceid": "a047ea62-0c74-4967-820e-75553a4d8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.287611] env[68279]: DEBUG oslo_concurrency.lockutils [req-0d2c6ded-87e3-4d19-b4a1-78ad471b738a req-c765dded-4603-461a-8e4b-256aa72be77c service nova] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.323130] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 
tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1055.478582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-537d6d42-c02c-4f8c-9917-72e3925c5aab tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.862s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.539695] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Releasing lock "refresh_cache-daccaa30-1011-4c7d-a668-05f9329ab4d5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.540712] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d84b3a7-cb06-43c4-9843-d5a0e4ff2afc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.553742] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Resuming the VM {{(pid=68279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1055.554045] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69a54e7d-b370-42d5-bf91-e394b6aa3018 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.562668] env[68279]: DEBUG oslo_vmware.api [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1055.562668] env[68279]: value = "task-2963694" [ 1055.562668] env[68279]: _type = "Task" [ 1055.562668] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.577091] env[68279]: DEBUG oslo_vmware.api [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963694, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.760828] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fb6ec4-4714-4981-9727-0f86a73a9bf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.769260] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742e4f64-c4de-4364-8323-f4e97a7d03f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.801604] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b038217-848d-4103-b721-92d3068c6e87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.812977] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab34637-3433-4d6b-b259-810067ba31e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.818604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.818915] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.833218] env[68279]: DEBUG nova.compute.provider_tree [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.848652] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.975556] env[68279]: DEBUG nova.network.neutron [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.079678] env[68279]: DEBUG oslo_vmware.api [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963694, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.321671] env[68279]: INFO nova.compute.manager [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Detaching volume ad42c3b3-ce0d-4cb3-b4a4-3be32274b555 [ 1056.338697] env[68279]: DEBUG nova.scheduler.client.report [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.353660] env[68279]: INFO nova.virt.block_device [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Attempting to driver detach volume ad42c3b3-ce0d-4cb3-b4a4-3be32274b555 from mountpoint /dev/sdb [ 1056.353907] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1056.354113] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594692', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'name': 'volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3763645-5a78-4929-98a3-108e72071211', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'serial': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1056.355026] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d535df6d-5801-49cb-8f4d-aa16a6abdda7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.382178] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a709ae36-df71-4302-a1f0-5b0fb32cfdfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.390615] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adbb460-1787-4c88-8ba0-6ff303786422 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.414804] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfee17b-484a-4eee-9194-aa97b98040b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.433921] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] The volume has not been displaced from its original location: [datastore1] volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555/volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1056.439506] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfiguring VM instance instance-00000038 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1056.439879] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14251f33-b425-48d4-a819-88697669a1fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.460580] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1056.460580] env[68279]: value = "task-2963695" [ 1056.460580] env[68279]: _type = "Task" [ 1056.460580] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.468866] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963695, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.478460] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.573389] env[68279]: DEBUG oslo_vmware.api [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963694, 'name': PowerOnVM_Task, 'duration_secs': 0.533214} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.573686] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Resumed the VM {{(pid=68279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1056.573922] env[68279]: DEBUG nova.compute.manager [None req-6f5411d1-a294-4466-9747-07d9badd798d tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.574710] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404e871a-f001-4dd6-a30d-40eb0f54b27b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.743052] env[68279]: INFO nova.compute.manager [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Rebuilding instance [ 1056.780219] env[68279]: DEBUG nova.compute.manager [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.781120] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89edb325-d068-47b2-b984-95279a317595 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.844142] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.844635] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1056.847354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.110s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.848740] env[68279]: INFO nova.compute.claims [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.971089] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963695, 'name': ReconfigVM_Task, 'duration_secs': 0.244344} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.971363] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Reconfigured VM instance instance-00000038 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1056.976127] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59cbeb0d-bfcb-4f81-9eae-a045b02cdf70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.995559] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1056.995559] env[68279]: value = "task-2963696" [ 1056.995559] env[68279]: _type = "Task" [ 1056.995559] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.004037] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.353466] env[68279]: DEBUG nova.compute.utils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1057.358054] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1057.358054] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1057.447010] env[68279]: DEBUG nova.policy [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1057.507650] env[68279]: DEBUG oslo_vmware.api [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963696, 'name': ReconfigVM_Task, 'duration_secs': 0.136779} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.507932] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594692', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'name': 'volume-ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3763645-5a78-4929-98a3-108e72071211', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555', 'serial': 'ad42c3b3-ce0d-4cb3-b4a4-3be32274b555'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1057.794170] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.794494] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43fc3916-fc0d-4ce2-9cc5-49bc937ccd33 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.804131] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1057.804131] env[68279]: value = "task-2963697" [ 1057.804131] env[68279]: _type = "Task" [ 1057.804131] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.812998] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.859212] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1057.902852] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Successfully created port: 7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1057.998403] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da71c665-8486-42e0-a9af-f4b3bbec1fcf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.020615] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1058.057376] env[68279]: DEBUG nova.objects.instance [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.175725] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea5c4dd-2a10-4855-890a-27327906d67d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.183788] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec60f693-baa8-4d85-8823-d2ae8f584307 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.215560] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e5ebda-f621-4c42-ada2-7c6e54a785cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.223877] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4832a2b6-56a3-4b40-b986-e6d3ab0bda4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.240733] env[68279]: DEBUG nova.compute.provider_tree [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 
tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.314618] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963697, 'name': PowerOffVM_Task, 'duration_secs': 0.219797} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.314897] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.371151] env[68279]: INFO nova.compute.manager [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Detaching volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 [ 1058.403016] env[68279]: INFO nova.virt.block_device [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Attempting to driver detach volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 from mountpoint /dev/sdb [ 1058.403222] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1058.403418] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1058.404597] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77be0cd4-13cf-4113-8c56-2f9aa4bdbaa9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.427852] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae741c5-31ee-405c-9daf-c8c8d26c1134 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.436027] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0f4527-4e7c-4968-80b6-b27c750bf973 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.456676] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173c7294-ed09-4d5d-8b59-03fda0f76ce7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.473340] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] The volume has not been displaced from its original location: [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1058.478637] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1058.478980] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8eb33b0-2199-4703-867e-1d603ffa72ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.502409] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1058.502409] env[68279]: value = "task-2963698" [ 1058.502409] env[68279]: _type = "Task" [ 1058.502409] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.515022] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963698, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.527240] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.527682] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-356a3351-4f6e-450f-b9e1-ce7409efb705 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.537364] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1058.537364] env[68279]: value = "task-2963699" [ 1058.537364] env[68279]: _type = "Task" [ 1058.537364] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.546510] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963699, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.741637] env[68279]: DEBUG nova.scheduler.client.report [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.875085] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1058.903421] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.903712] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.903836] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.904034] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.904172] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1058.904324] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.904529] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.904721] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.904903] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.905188] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.905329] env[68279]: DEBUG nova.virt.hardware [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.906306] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94a83bc-7286-43b7-848f-dd568eb07a47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.915383] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272ae7bf-19d2-4b2f-8597-37a0af66258b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.998875] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.012688] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963698, 'name': ReconfigVM_Task, 'duration_secs': 0.228724} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.012976] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1059.018309] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22cb669c-f812-453b-9b7e-f87fe48a6876 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.036338] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1059.036338] env[68279]: value = "task-2963700" [ 1059.036338] env[68279]: _type = "Task" [ 1059.036338] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.049719] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963700, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.052955] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963699, 'name': PowerOffVM_Task, 'duration_secs': 0.197026} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.053234] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.053532] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1059.066390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3ce34621-8b03-4cda-adad-668335b5fbd2 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.067496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.069s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.067677] env[68279]: DEBUG nova.compute.manager [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.068748] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8044416-1d2f-4be1-8138-084c4c2e8343 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.076107] env[68279]: DEBUG nova.compute.manager [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1059.076750] env[68279]: DEBUG nova.objects.instance [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.247437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.248111] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1059.251675] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.773s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.252068] env[68279]: DEBUG nova.objects.instance [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'resources' on Instance uuid c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.419144] env[68279]: DEBUG nova.compute.manager [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-vif-plugged-7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.419144] env[68279]: DEBUG oslo_concurrency.lockutils [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.419144] env[68279]: DEBUG oslo_concurrency.lockutils [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.419144] env[68279]: DEBUG oslo_concurrency.lockutils [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.419144] env[68279]: DEBUG nova.compute.manager [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] No waiting events found dispatching network-vif-plugged-7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1059.419563] env[68279]: WARNING nova.compute.manager [req-2aab313d-3f04-4012-ac8d-063a15ed3484 req-f4fd1668-4519-47fd-951b-313ba50b0321 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received unexpected event network-vif-plugged-7243843a-c48e-44d5-990f-1de0a9191cbd for instance with vm_state building and task_state 
spawning. [ 1059.516871] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Successfully updated port: 7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1059.546703] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963700, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.560277] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.560534] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.560707] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.560928] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.561045] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.561198] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.561445] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.561599] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.561768] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.561928] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.562115] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.567316] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ffbcf69-81f6-4a17-97fc-a3a584b26350 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.583876] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1059.583876] env[68279]: value = "task-2963701" [ 1059.583876] env[68279]: _type = "Task" [ 1059.583876] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.592715] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.756131] env[68279]: DEBUG nova.compute.utils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1059.760947] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1059.761074] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1059.808486] env[68279]: DEBUG nova.policy [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.019650] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.019892] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.021019] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1060.031502] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2b0e43-aff3-4b26-9280-2d2b8fb90178 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.044260] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26ab0a1-a14e-4436-b80c-903f28321829 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.053341] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963700, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.078307] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1958b01-3e7a-4a5d-8d85-c35bd3eb6e68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.085024] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.085024] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b2d7740-51f6-4f24-9945-9906fa7e8c66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.091839] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fd6c14-6572-4c37-a680-a143369d1972 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.096792] env[68279]: DEBUG oslo_vmware.api [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1060.096792] env[68279]: value = "task-2963702" [ 1060.096792] env[68279]: _type = "Task" [ 1060.096792] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.112782] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.112782] env[68279]: DEBUG nova.compute.provider_tree [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.120624] env[68279]: DEBUG oslo_vmware.api [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963702, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.157616] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Successfully created port: 0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1060.261137] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1060.550251] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963700, 'name': ReconfigVM_Task, 'duration_secs': 1.136821} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.551073] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1060.553933] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1060.602774] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963701, 'name': ReconfigVM_Task, 'duration_secs': 0.7192} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.606847] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.617046] env[68279]: DEBUG nova.scheduler.client.report [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.621165] env[68279]: DEBUG oslo_vmware.api [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963702, 'name': PowerOffVM_Task, 'duration_secs': 0.224192} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.621165] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.621165] env[68279]: DEBUG nova.compute.manager [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1060.621165] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5a38c2-4c9d-4e66-b110-4afdc16e1e6b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.686370] env[68279]: DEBUG nova.network.neutron [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.113736] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1061.114035] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.114075] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1061.114256] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.114397] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1061.114543] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1061.114743] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1061.114921] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1061.115113] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1061.115278] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1061.115451] env[68279]: DEBUG nova.virt.hardware [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1061.120953] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1061.121984] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.124221] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eae9a24-3889-438e-bd92-0dcddf94acde {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.138874] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.917s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.139112] env[68279]: DEBUG nova.objects.instance [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] [instance: f4963730-d516-48b7-a320-8af731831a30] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1061.144472] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86092b2c-30c7-444b-9cf7-c5e7d79db57d tempest-AttachVolumeTestJSON-307007662 
tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.077s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.150880] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1061.150880] env[68279]: value = "task-2963703" [ 1061.150880] env[68279]: _type = "Task" [ 1061.150880] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.162145] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963703, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.163138] env[68279]: INFO nova.scheduler.client.report [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted allocations for instance c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5 [ 1061.188482] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.188868] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Instance network_info: |[{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1061.189665] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:d5:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7243843a-c48e-44d5-990f-1de0a9191cbd', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.198631] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating folder: Project (c765e6d99a8f47d6b932e30e05e54405). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.199302] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66d74415-3f79-4c15-88d2-c31b74f36136 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.215573] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created folder: Project (c765e6d99a8f47d6b932e30e05e54405) in parent group-v594445. [ 1061.215802] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating folder: Instances. Parent ref: group-v594717. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.216064] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ab5adad-56e3-4f02-a256-8f92f6b3c783 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.229821] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created folder: Instances in parent group-v594717. [ 1061.231679] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.231679] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.231679] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65f6a57a-e3f4-4104-8b10-6a9865e7debb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.257733] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.257733] env[68279]: value = "task-2963706" [ 1061.257733] env[68279]: _type = "Task" [ 1061.257733] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.266815] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963706, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.271151] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1061.298960] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1061.298960] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.298960] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1061.298960] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.299278] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1061.299481] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1061.300031] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1061.300031] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1061.300165] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1061.300478] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1061.300740] env[68279]: DEBUG nova.virt.hardware [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1061.301740] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dac1369-d60c-4dec-a786-40491f2d35c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.310683] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41faadc9-6af4-4081-ae21-6f510da977c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.493875] env[68279]: DEBUG nova.compute.manager [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-changed-7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.495178] env[68279]: DEBUG nova.compute.manager [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing instance network info cache due to event network-changed-7243843a-c48e-44d5-990f-1de0a9191cbd. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.495508] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.495688] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.495902] env[68279]: DEBUG nova.network.neutron [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing network info cache for port 7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.497409] env[68279]: DEBUG nova.objects.instance [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.601606] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.601974] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a377b899-9cd7-4bd3-8cf0-9b2a7f0826e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.611427] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1061.611427] env[68279]: value = "task-2963707" [ 1061.611427] env[68279]: _type = "Task" [ 1061.611427] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.620544] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.662793] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963703, 'name': ReconfigVM_Task, 'duration_secs': 0.267977} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.663083] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1061.664069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9e3226-898a-46c4-bee3-526dce9c8125 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.690781] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1061.691439] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7a956d21-6e86-421c-ae2f-bc155a887a50 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.517s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.693884] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6d750d6-f36e-41b8-aca8-4c3309702d10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.716608] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1061.716608] env[68279]: value = "task-2963708" [ 1061.716608] env[68279]: _type = "Task" [ 1061.716608] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.726787] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.731170] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Successfully updated port: 0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1061.767638] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963706, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.830695] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.830974] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.831206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.831392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.831574] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.836012] env[68279]: INFO nova.compute.manager [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Terminating instance [ 1062.009295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.009485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.009546] env[68279]: DEBUG nova.network.neutron [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d 
tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.009785] env[68279]: DEBUG nova.objects.instance [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'info_cache' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.127098] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1062.127366] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1062.127525] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1062.128337] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791a3a82-7204-4e42-8c75-47777c9d9b79 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.156355] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8901a21c-7f7b-4f63-b0d2-daa68ea790db tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.160722] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.482s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.160965] env[68279]: DEBUG nova.objects.instance [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b 
tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lazy-loading 'resources' on Instance uuid 7d15a05a-f827-40a7-b182-5d2b553481c7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.162867] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2e1bcc-7b0b-4a65-ab22-f5e6e433d716 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.171569] env[68279]: WARNING nova.virt.vmwareapi.driver [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1062.171874] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1062.172765] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b425befc-6c3b-4c47-b2b1-cf1f45debbe4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.182518] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.182783] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eae0ad4e-1386-4e00-887c-497a2ba0267c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.227980] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963708, 'name': ReconfigVM_Task, 'duration_secs': 0.337602} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.228456] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Reconfigured VM instance instance-0000005c to attach disk [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf/a96ea5b4-39c5-4a24-873f-54480f876fbf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1062.228896] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1062.233785] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.233932] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.234073] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.270997] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963706, 'name': CreateVM_Task, 'duration_secs': 0.607352} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.272565] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.272971] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.273266] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.273519] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore1] 50f390b2-99b7-49f3-997f-7d7b50cff9f2 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.274420] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.274642] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.275066] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.275371] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8613fef9-8dff-4a99-af08-8c04821db4b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.277931] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7781d26f-001e-4701-92a1-5e2b12b249fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.283937] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1062.283937] env[68279]: value = 
"session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523c1689-3b40-fb17-c71b-afd5066cef55" [ 1062.283937] env[68279]: _type = "Task" [ 1062.283937] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.295172] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1062.295172] env[68279]: value = "task-2963711" [ 1062.295172] env[68279]: _type = "Task" [ 1062.295172] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.304473] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523c1689-3b40-fb17-c71b-afd5066cef55, 'name': SearchDatastore_Task, 'duration_secs': 0.011461} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.308539] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.308854] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.309188] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.309398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.309678] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.310339] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68e8ce3d-ead9-47d9-a42d-b9f3db5425fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1062.319034] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.324411] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.324605] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.325474] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94b21cf-a606-4db7-83b3-0dc61db20548 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.338030] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1062.338030] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52574abd-1a32-4dfe-8dd6-bd2c963bdae7" [ 1062.338030] env[68279]: _type = "Task" [ 1062.338030] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.341169] env[68279]: DEBUG nova.compute.manager [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1062.341401] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1062.342233] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c9b2e2-1771-4165-b43d-57f9df915045 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.351682] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52574abd-1a32-4dfe-8dd6-bd2c963bdae7, 'name': SearchDatastore_Task, 'duration_secs': 0.015091} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.356383] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.356609] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf543c00-7443-4cce-94b1-2ce8d6ee18c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.358942] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bfc3749-4ef3-45d8-8f7c-ca8ca5d3178c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.363863] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1062.363863] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895f44-e052-6026-09ff-659274f7df28" [ 1062.363863] env[68279]: _type = "Task" [ 1062.363863] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.368839] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1062.368839] env[68279]: value = "task-2963712" [ 1062.368839] env[68279]: _type = "Task" [ 1062.368839] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.376315] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895f44-e052-6026-09ff-659274f7df28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.379413] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963712, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.421942] env[68279]: DEBUG nova.network.neutron [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updated VIF entry in instance network info cache for port 7243843a-c48e-44d5-990f-1de0a9191cbd. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.422334] env[68279]: DEBUG nova.network.neutron [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.514080] env[68279]: DEBUG nova.objects.base [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1062.740248] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bbe451-0a40-499e-8c6c-67d0eed8bbce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.763197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f815bb-f878-4743-9d74-9639aca1e131 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.768711] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1062.783876] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1062.816793] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147322} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.817068] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1062.817255] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1062.817429] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1062.878709] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52895f44-e052-6026-09ff-659274f7df28, 'name': SearchDatastore_Task, 'duration_secs': 0.012377} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.879469] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.880089] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f6a65d1b-ba9c-44b7-b9aa-815cabd45176/f6a65d1b-ba9c-44b7-b9aa-815cabd45176.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1062.880089] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0091a036-c35f-40ca-912d-1f2545256483 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.884611] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963712, 'name': PowerOffVM_Task, 'duration_secs': 0.324691} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.885154] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.885458] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.885606] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e9927e5-54f8-417d-8e03-e1bb3bde964a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.893874] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1062.893874] env[68279]: value = "task-2963713" [ 1062.893874] env[68279]: _type = "Task" [ 1062.893874] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.903919] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963713, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.926346] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2e66813-ea91-482e-b8a3-25a1faacc983 req-14a3abf3-d9d2-4f78-8e55-f95b5fa4d641 service nova] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.928021] env[68279]: DEBUG nova.network.neutron [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Updating instance_info_cache with network_info: [{"id": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "address": "fa:16:3e:0f:43:f9", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccbc684-0a", "ovs_interfaceid": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.947645] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cf8b27-85b3-4614-8ada-a027cc637cf6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.956361] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94e1c9-8c99-4623-9cde-1c4ee1835781 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.963563] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.963781] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.963953] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleting the datastore file [datastore2] 
daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.964243] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a117762-f634-41c9-aa76-7a89c88a02e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.991259] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c749be9-7efd-4d04-b54a-48c6fe896b46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.995236] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for the task: (returnval){ [ 1062.995236] env[68279]: value = "task-2963715" [ 1062.995236] env[68279]: _type = "Task" [ 1062.995236] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.000879] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488cf54a-650a-4846-a9a3-433b5743317d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.009822] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963715, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.019368] env[68279]: DEBUG nova.compute.provider_tree [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.323698] env[68279]: INFO nova.virt.block_device [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Booting with volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 at /dev/sdb [ 1063.335392] env[68279]: DEBUG nova.network.neutron [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [{"id": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "address": "fa:16:3e:0c:82:62", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1c4e041-ce", "ovs_interfaceid": "f1c4e041-ced5-433b-a721-e9fa16d159ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.339061] env[68279]: DEBUG nova.network.neutron [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Port 38ecf3bb-21fe-4683-8cc4-40e133bebe1f binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1063.367454] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4981ea6a-4d13-4c9b-abf2-ae90a556d3c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.381892] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58874dc-b67d-4b5a-83c2-b8c63bbb4dec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.408688] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510158} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.418873] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] f6a65d1b-ba9c-44b7-b9aa-815cabd45176/f6a65d1b-ba9c-44b7-b9aa-815cabd45176.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.419152] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.419600] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5e8e5bd-8114-48c5-9dc3-fe3ae3b54fdd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.421945] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af1e3a99-0e06-457c-8e3a-26511443c34a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.431957] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31c974d-cd28-463d-8cf1-d8fa452fe43b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.446359] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.446692] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Instance network_info: |[{"id": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "address": "fa:16:3e:0f:43:f9", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccbc684-0a", "ovs_interfaceid": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1063.447096] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1063.447096] env[68279]: value = "task-2963716" [ 1063.447096] env[68279]: _type = "Task" [ 1063.447096] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.447472] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:43:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ccbc684-0a9c-420e-8e3b-877ae7a284e2', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1063.455928] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1063.456705] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1063.460317] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43784492-b66a-41b1-89f1-62ff59f118d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.486570] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a140853-dadc-4f5c-9514-1c0c79994ade {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.493302] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963716, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.495077] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1063.495077] env[68279]: value = "task-2963717" [ 1063.495077] env[68279]: _type = "Task" [ 1063.495077] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.507452] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c2ea47-43ce-44c4-be6c-6bea6a816c32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.510011] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963717, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.513465] env[68279]: DEBUG oslo_vmware.api [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Task: {'id': task-2963715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.438939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.514117] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1063.514354] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1063.514565] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1063.514755] env[68279]: INFO nova.compute.manager [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1063.514992] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1063.515195] env[68279]: DEBUG nova.compute.manager [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1063.515291] env[68279]: DEBUG nova.network.neutron [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1063.522184] env[68279]: DEBUG nova.scheduler.client.report [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.531899] env[68279]: DEBUG nova.virt.block_device [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating existing volume attachment record: c798490e-40ed-43f3-8698-555b264a67e9 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1063.545860] env[68279]: DEBUG nova.compute.manager [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Received event network-vif-plugged-0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.546176] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Acquiring lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.546395] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.546566] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.546736] env[68279]: DEBUG nova.compute.manager [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] No waiting events found 
dispatching network-vif-plugged-0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.546896] env[68279]: WARNING nova.compute.manager [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Received unexpected event network-vif-plugged-0ccbc684-0a9c-420e-8e3b-877ae7a284e2 for instance with vm_state building and task_state spawning. [ 1063.547075] env[68279]: DEBUG nova.compute.manager [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Received event network-changed-0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.547233] env[68279]: DEBUG nova.compute.manager [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Refreshing instance network info cache due to event network-changed-0ccbc684-0a9c-420e-8e3b-877ae7a284e2. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1063.547411] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Acquiring lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.547542] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Acquired lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.547693] env[68279]: DEBUG nova.network.neutron [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Refreshing network info cache for port 0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.812043] env[68279]: DEBUG nova.compute.manager [req-f2be2850-e4ff-419e-901c-c9c2886680f1 req-6cf5c03c-0744-4232-814b-f28afa0059f7 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Received event network-vif-deleted-a047ea62-0c74-4967-820e-75553a4d8d7c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.812407] env[68279]: INFO nova.compute.manager [req-f2be2850-e4ff-419e-901c-c9c2886680f1 req-6cf5c03c-0744-4232-814b-f28afa0059f7 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Neutron deleted interface a047ea62-0c74-4967-820e-75553a4d8d7c; detaching it from the instance and deleting it from the info cache [ 1063.812499] env[68279]: DEBUG nova.network.neutron [req-f2be2850-e4ff-419e-901c-c9c2886680f1 req-6cf5c03c-0744-4232-814b-f28afa0059f7 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.839455] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock 
"refresh_cache-e3763645-5a78-4929-98a3-108e72071211" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.967015] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079094} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.967307] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.968082] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0842ab65-184b-4a2b-882f-a81a439cacf5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.990141] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] f6a65d1b-ba9c-44b7-b9aa-815cabd45176/f6a65d1b-ba9c-44b7-b9aa-815cabd45176.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.990431] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ed7aa95-a3a3-4bee-b3a5-c5543475014c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.012110] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963717, 'name': CreateVM_Task, 'duration_secs': 0.364265} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.013245] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1064.013571] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1064.013571] env[68279]: value = "task-2963718" [ 1064.013571] env[68279]: _type = "Task" [ 1064.013571] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.014269] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.014430] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.014738] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1064.015040] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a100c80d-56e8-4215-a333-17bd850ff00a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.023638] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1064.023638] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5281b662-d642-1e65-d23f-0684abe8ad95" [ 1064.023638] env[68279]: _type = "Task" [ 1064.023638] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.027268] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.029206] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963718, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.032322] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.297s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.032940] env[68279]: DEBUG nova.objects.instance [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lazy-loading 'resources' on Instance uuid f4963730-d516-48b7-a320-8af731831a30 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.045020] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5281b662-d642-1e65-d23f-0684abe8ad95, 'name': SearchDatastore_Task, 'duration_secs': 0.012219} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.045967] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.046211] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.046850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.046850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.046850] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.047280] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e879162-0dcc-48f6-aab0-e7ee4565d68f {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.056372] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.056550] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1064.057954] env[68279]: INFO nova.scheduler.client.report [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Deleted allocations for instance 7d15a05a-f827-40a7-b182-5d2b553481c7 [ 1064.058870] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40dc4aa5-fc71-4cfa-9af7-b02c67da2f51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.067440] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1064.067440] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b42c5a-551d-503e-35f1-390e6a89a43b" [ 1064.067440] env[68279]: _type = "Task" [ 1064.067440] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.077620] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b42c5a-551d-503e-35f1-390e6a89a43b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.266572] env[68279]: DEBUG nova.network.neutron [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Updated VIF entry in instance network info cache for port 0ccbc684-0a9c-420e-8e3b-877ae7a284e2. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.266976] env[68279]: DEBUG nova.network.neutron [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Updating instance_info_cache with network_info: [{"id": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "address": "fa:16:3e:0f:43:f9", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccbc684-0a", "ovs_interfaceid": "0ccbc684-0a9c-420e-8e3b-877ae7a284e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.293877] env[68279]: DEBUG nova.network.neutron [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.317876] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77188a97-7b0b-4791-81ef-8bd2b6b5f389 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.327801] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a992c8f5-7558-4f71-9f14-0e95fbc71237 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.364673] env[68279]: DEBUG nova.compute.manager [req-f2be2850-e4ff-419e-901c-c9c2886680f1 req-6cf5c03c-0744-4232-814b-f28afa0059f7 service nova] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Detach interface failed, port_id=a047ea62-0c74-4967-820e-75553a4d8d7c, reason: Instance daccaa30-1011-4c7d-a668-05f9329ab4d5 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1064.371364] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.371609] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.371789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.525923] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963718, 'name': ReconfigVM_Task, 'duration_secs': 0.288259} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.526678] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfigured VM instance instance-00000063 to attach disk [datastore2] f6a65d1b-ba9c-44b7-b9aa-815cabd45176/f6a65d1b-ba9c-44b7-b9aa-815cabd45176.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.526798] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f351926-f3e6-43cc-90e8-f99b4b06505e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.533569] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1064.533569] env[68279]: value = "task-2963719" [ 1064.533569] env[68279]: _type = "Task" [ 1064.533569] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.545224] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963719, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.567206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-837b9b32-cd80-43e8-9b4a-57b117c0891b tempest-ServersWithSpecificFlavorTestJSON-2136933145 tempest-ServersWithSpecificFlavorTestJSON-2136933145-project-member] Lock "7d15a05a-f827-40a7-b182-5d2b553481c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.434s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.580404] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b42c5a-551d-503e-35f1-390e6a89a43b, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.581861] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e028e235-545c-4f01-9948-4a8501fc8a10 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.591347] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1064.591347] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f54074-3570-3c5d-85bf-05e64726d7aa" [ 1064.591347] env[68279]: _type = "Task" [ 1064.591347] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.608521] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f54074-3570-3c5d-85bf-05e64726d7aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.770087] env[68279]: DEBUG oslo_concurrency.lockutils [req-c7615046-f9ec-4d18-908e-a44d387cbe25 req-df679499-adfa-41f9-941d-e09516d1affc service nova] Releasing lock "refresh_cache-525e4894-a8b1-45ae-a846-84ded8d97584" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.799067] env[68279]: INFO nova.compute.manager [-] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Took 1.28 seconds to deallocate network for instance. 
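Editorial note on the set_inventory_for_provider entries above: the resource tracker keeps reporting the same inventory payload for provider 40ba16cf-8244-4715-b8c1-975029462ee4 (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). A minimal sketch of how Placement-style usable capacity falls out of those fields, assuming the usual rule capacity = (total - reserved) * allocation_ratio; the inventory dict is copied from the log, the helper function is illustrative only:

# Inventory as reported in the entries above (non-capacity fields omitted).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    # Assumed Placement-style capacity: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

The max_unit values in the logged payload (16 VCPU, 65530 MB, 153 GB) cap what a single allocation may request, independently of the allocation ratio.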
[ 1064.799067] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c23a75-fa3f-4322-88c9-6b362f1e63d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.808310] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4611776e-522f-4ccb-839d-d359b65b4734 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.844600] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41282f75-8587-4a5a-8480-b41913dd6180 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.848208] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.848476] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89299024-fb1d-4e8e-85f9-1f7090d0ff27 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.855707] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1202deae-420a-4137-a38e-d9d63133c188 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.860940] env[68279]: DEBUG oslo_vmware.api [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1064.860940] env[68279]: value = "task-2963720" [ 1064.860940] env[68279]: _type = "Task" [ 1064.860940] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.872465] env[68279]: DEBUG nova.compute.provider_tree [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.879733] env[68279]: DEBUG oslo_vmware.api [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963720, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.045084] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963719, 'name': Rename_Task, 'duration_secs': 0.354525} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.045384] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1065.045838] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76ac5d1a-1c9e-4d94-b6eb-ede35e3b8d01 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.053915] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1065.053915] env[68279]: value = "task-2963721" [ 1065.053915] env[68279]: _type = "Task" [ 1065.053915] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.063514] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963721, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.103743] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f54074-3570-3c5d-85bf-05e64726d7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.06253} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.105703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.105703] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 525e4894-a8b1-45ae-a846-84ded8d97584/525e4894-a8b1-45ae-a846-84ded8d97584.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1065.108151] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a707e9e9-d7e3-415e-864d-376e6d47d9ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.120375] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1065.120375] env[68279]: value = "task-2963722" [ 1065.120375] env[68279]: _type = "Task" [ 1065.120375] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.130802] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963722, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.309555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.375737] env[68279]: DEBUG oslo_vmware.api [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963720, 'name': PowerOnVM_Task, 'duration_secs': 0.391395} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.376650] env[68279]: DEBUG nova.scheduler.client.report [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.382611] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.382856] env[68279]: DEBUG nova.compute.manager [None req-a8982da9-8e9c-4749-8617-e0e0a9b6278d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.383967] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358df7e9-a7ae-4c8f-ad6e-5706eede70fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.419393] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.419597] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.419833] env[68279]: DEBUG nova.network.neutron [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.570345] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963721, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.615327] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.615506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.615734] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.615957] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.616214] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.618841] env[68279]: INFO nova.compute.manager [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Terminating instance [ 1065.631429] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489029} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.632429] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 525e4894-a8b1-45ae-a846-84ded8d97584/525e4894-a8b1-45ae-a846-84ded8d97584.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.632898] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.632898] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-657ea74a-a5da-4a4d-bba9-745399bc5a12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.640018] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1065.640018] env[68279]: value = "task-2963723" [ 1065.640018] env[68279]: _type = "Task" [ 1065.640018] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.650253] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963723, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.681971] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.682251] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.682410] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.682593] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.682742] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.682889] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.683105] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.683268] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.683436] 
env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.683598] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.683772] env[68279]: DEBUG nova.virt.hardware [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.684695] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07061161-b22c-41b8-8e23-bccaf7b47364 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.694065] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5087a090-dd1f-446f-a8b1-650721b3bbbc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.709100] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:cf:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5959e66b-7a16-41ba-8c1b-adbc5941455e', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1065.717309] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.717793] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1065.718046] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7dc2b88-9e18-4329-a8d5-e1ce77c8e0f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.739104] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1065.739104] env[68279]: value = "task-2963724" [ 1065.739104] env[68279]: _type = "Task" [ 1065.739104] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.749171] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963724, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.884481] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.886928] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.193s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.887183] env[68279]: DEBUG nova.objects.instance [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'resources' on Instance uuid f38a489d-ddcb-4a66-bb60-058d46ed69db {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.922382] env[68279]: INFO nova.scheduler.client.report [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Deleted allocations for instance f4963730-d516-48b7-a320-8af731831a30 [ 1066.068262] env[68279]: DEBUG oslo_vmware.api [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963721, 'name': PowerOnVM_Task, 'duration_secs': 0.720849} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.068557] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1066.068761] env[68279]: INFO nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Took 7.19 seconds to spawn the instance on the hypervisor. 
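Editorial note on the oslo_vmware.api entries in this excerpt: each vCenter operation (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is created and then polled by wait_for_task, which produces the "progress is N%" lines until the task "completed successfully". A minimal sketch of that polling pattern, assuming a hypothetical session.get_task_info() helper in place of the real PropertyCollector read; this is an illustration, not oslo.vmware's implementation:

import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    # Poll the task until it reaches a terminal state, mirroring the
    # "progress is N%" / "completed successfully" lines in the log above.
    while True:
        info = session.get_task_info(task_ref)  # hypothetical helper, not a real oslo.vmware call
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"Task {info['name']} failed: {info['error']}")
        print(f"Task {info['key']} ({info['name']}) progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

For the spawn that finishes just above ("Took 7.19 seconds to spawn the instance on the hypervisor"), the polled tasks ran in sequence: locate the cached image under devstack-image-cache_base, copy its VMDK into the instance folder, extend the root disk, reconfigure the VM to attach it, rename, then power on.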
[ 1066.068929] env[68279]: DEBUG nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.069915] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deeb38df-0692-4d21-ad6c-d5b177140778 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.128404] env[68279]: DEBUG nova.compute.manager [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1066.128608] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.129747] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e2bb21-4fcf-4709-a466-5a453dedd077 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.139318] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.139594] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bb83b7c-4212-43ca-9e70-afec2685c3ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.151538] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963723, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224688} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.152996] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1066.153354] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1066.153354] env[68279]: value = "task-2963725" [ 1066.153354] env[68279]: _type = "Task" [ 1066.153354] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.154065] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87069f24-a8bd-4d78-8218-ce774f93e916 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.168598] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963725, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.188566] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 525e4894-a8b1-45ae-a846-84ded8d97584/525e4894-a8b1-45ae-a846-84ded8d97584.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.191380] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7f61c9a-c4dd-435c-92e3-e8645c8aec66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.215476] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1066.215476] env[68279]: value = "task-2963726" [ 1066.215476] env[68279]: _type = "Task" [ 1066.215476] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.226269] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963726, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.245554] env[68279]: DEBUG nova.network.neutron [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.253471] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963724, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.434030] env[68279]: DEBUG oslo_concurrency.lockutils [None req-12278380-dbad-40c4-9bad-e88124ca36ad tempest-ServersListShow298Test-2008305792 tempest-ServersListShow298Test-2008305792-project-member] Lock "f4963730-d516-48b7-a320-8af731831a30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.007s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.591646] env[68279]: INFO nova.compute.manager [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Took 30.93 seconds to build instance. [ 1066.675866] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e45652-b63f-40ad-8b29-e7ffd10a8374 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.689177] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963725, 'name': PowerOffVM_Task, 'duration_secs': 0.337433} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.692184] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.692496] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1066.692938] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93ffea30-9224-454e-b70b-2760cb7f5aa5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.696422] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3c3ce4-d996-4aa2-a5b8-ec88721d12eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.740370] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557cfea5-7f68-4425-bb43-e309f0568752 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.749254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.757939] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.759457] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fdb799-4e8e-40e7-8266-44107ea8d8d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.766752] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963724, 'name': CreateVM_Task, 'duration_secs': 0.667768} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.767203] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1066.767950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.768113] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.769548] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1066.769548] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c5197ba-4547-42bf-a4c0-d78272560b92 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.780136] env[68279]: DEBUG nova.compute.provider_tree [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.782519] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1066.786025] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1066.786025] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleting the datastore file [datastore1] cfaee7e2-6929-4d8c-8614-e19e0055f2fb {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.786025] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49b07f7e-ad04-4c26-a95b-d1f98667f3f1 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.790055] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1066.790055] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52abc8bb-d092-9e52-7595-491ea75f8bae" [ 1066.790055] env[68279]: _type = "Task" [ 1066.790055] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.792453] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1066.792453] env[68279]: value = "task-2963728" [ 1066.792453] env[68279]: _type = "Task" [ 1066.792453] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.796933] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52abc8bb-d092-9e52-7595-491ea75f8bae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.805380] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963728, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.093522] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f6a070b3-00b4-4a85-8fc2-b7680403d503 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.439s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.247909] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963726, 'name': ReconfigVM_Task, 'duration_secs': 0.675013} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.248229] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 525e4894-a8b1-45ae-a846-84ded8d97584/525e4894-a8b1-45ae-a846-84ded8d97584.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.248941] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30157b59-ec29-4e55-a4e6-907ed4f898ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.257761] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1067.257761] env[68279]: value = "task-2963729" [ 1067.257761] env[68279]: _type = "Task" [ 1067.257761] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.272209] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963729, 'name': Rename_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.285510] env[68279]: DEBUG nova.scheduler.client.report [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.299179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24111b87-ef44-4fa0-a668-8b84a072ddb6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.317437] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52abc8bb-d092-9e52-7595-491ea75f8bae, 'name': SearchDatastore_Task, 'duration_secs': 0.013834} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.340101] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.340435] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1067.340744] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.340923] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.341163] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1067.342530] env[68279]: DEBUG oslo_vmware.api [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.513606} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.342530] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbff4f61-3687-4461-9466-5a77aeecfd21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.345827] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.346715] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1067.346893] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1067.347355] env[68279]: INFO nova.compute.manager [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1067.347736] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.350295] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf27c82-5612-4c40-847b-57dd7d5e6a2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.355615] env[68279]: DEBUG nova.compute.manager [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1067.355843] env[68279]: DEBUG nova.network.neutron [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1067.366822] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.375065] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1067.375348] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1067.377012] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77347aca-ad49-4620-88eb-eaaf730d7db5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.390566] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1067.390566] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b72075-70b6-ae80-8ad8-0582cf82595a" [ 1067.390566] env[68279]: _type = "Task" [ 1067.390566] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.405952] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b72075-70b6-ae80-8ad8-0582cf82595a, 'name': SearchDatastore_Task, 'duration_secs': 0.011526} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.407382] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c062839-3b57-44fc-acee-b2c2e3ff86b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.417626] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1067.417626] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7f2c5-671c-a23e-36b6-4ecfed858755" [ 1067.417626] env[68279]: _type = "Task" [ 1067.417626] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.428734] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7f2c5-671c-a23e-36b6-4ecfed858755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.772592] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963729, 'name': Rename_Task, 'duration_secs': 0.19038} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.773503] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.773994] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0ac702e-bc51-496c-a831-40463d2621c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.783234] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1067.783234] env[68279]: value = "task-2963730" [ 1067.783234] env[68279]: _type = "Task" [ 1067.783234] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.792594] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963730, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.801019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.801019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.034s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.801588] env[68279]: DEBUG nova.objects.instance [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lazy-loading 'resources' on Instance uuid efda54fe-09a3-4653-b16a-8b3cdd4849c5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.820062] env[68279]: INFO nova.scheduler.client.report [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance f38a489d-ddcb-4a66-bb60-058d46ed69db [ 1067.880160] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.880160] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c844573-7dc5-4e5c-85ae-c4ee2143e4f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.889131] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1067.889131] env[68279]: value = "task-2963731" [ 1067.889131] env[68279]: _type = "Task" [ 1067.889131] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.897175] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.929389] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c7f2c5-671c-a23e-36b6-4ecfed858755, 'name': SearchDatastore_Task, 'duration_secs': 0.012544} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.929843] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.930221] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1067.930670] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19e9f7d6-99c1-41d2-b214-81b72034f899 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.943022] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1067.943022] env[68279]: value = "task-2963732" [ 1067.943022] env[68279]: _type = "Task" [ 1067.943022] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.953247] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.175050] env[68279]: DEBUG nova.compute.manager [req-a41d8aa0-b3b5-461e-b150-0b638f1326d2 req-30855a82-e7bf-4be2-9dd5-f2d35fd230f4 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Received event network-vif-deleted-d0861d0a-53ee-41f0-b051-d3c2f213a4a7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.175427] env[68279]: INFO nova.compute.manager [req-a41d8aa0-b3b5-461e-b150-0b638f1326d2 req-30855a82-e7bf-4be2-9dd5-f2d35fd230f4 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Neutron deleted interface d0861d0a-53ee-41f0-b051-d3c2f213a4a7; detaching it from the instance and deleting it from the info cache [ 1068.175718] env[68279]: DEBUG nova.network.neutron [req-a41d8aa0-b3b5-461e-b150-0b638f1326d2 req-30855a82-e7bf-4be2-9dd5-f2d35fd230f4 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.300474] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963730, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.328824] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8387ff9e-70eb-43e1-8590-b4b064190126 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "f38a489d-ddcb-4a66-bb60-058d46ed69db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.731s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.404323] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963731, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.453032] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963732, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.601528] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05aa054-a9ac-4058-b9f1-3e33667ff585 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.611163] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c900899b-ce08-4361-9ebf-5d2940ec5970 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.648082] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd12259c-4743-4785-9d0d-93cab8e08e70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.652934] env[68279]: DEBUG nova.network.neutron [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.660797] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c7c910-559f-49eb-88c0-0025c7129bba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.679653] env[68279]: DEBUG nova.compute.provider_tree [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.681070] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb4e3d60-6989-4142-91b6-0729e357dd9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.694114] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cf1281-794f-4660-aeb4-22573d9aef5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1068.731848] env[68279]: DEBUG nova.compute.manager [req-a41d8aa0-b3b5-461e-b150-0b638f1326d2 req-30855a82-e7bf-4be2-9dd5-f2d35fd230f4 service nova] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Detach interface failed, port_id=d0861d0a-53ee-41f0-b051-d3c2f213a4a7, reason: Instance cfaee7e2-6929-4d8c-8614-e19e0055f2fb could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1068.796719] env[68279]: DEBUG oslo_vmware.api [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963730, 'name': PowerOnVM_Task, 'duration_secs': 0.655796} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.797153] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1068.797448] env[68279]: INFO nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Took 7.53 seconds to spawn the instance on the hypervisor. [ 1068.797710] env[68279]: DEBUG nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.798831] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c8a968-0890-4c49-96af-accdc8cb42b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.903073] env[68279]: DEBUG oslo_vmware.api [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963731, 'name': PowerOnVM_Task, 'duration_secs': 0.54408} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.903771] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1068.903771] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-647bbf21-ec22-4281-9706-3ec9d64ee827 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance 'a96ea5b4-39c5-4a24-873f-54480f876fbf' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1068.952961] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963732, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57175} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.953263] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1068.953537] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1068.953730] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9f2f026-2b7c-4c38-9ca7-607e08093ad7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.963662] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1068.963662] env[68279]: value = "task-2963733" [ 1068.963662] env[68279]: _type = "Task" [ 1068.963662] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.975640] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.157807] env[68279]: INFO nova.compute.manager [-] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Took 1.80 seconds to deallocate network for instance. [ 1069.185479] env[68279]: DEBUG nova.scheduler.client.report [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.321317] env[68279]: INFO nova.compute.manager [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Took 31.60 seconds to build instance. 
[ 1069.478127] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075404} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.478415] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1069.479227] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3b97fd-1ee5-4f83-9902-aa18537fdd3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.505067] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1069.505302] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8eff042-ae99-40d6-8a3b-5c03a8f5aeb5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.529820] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1069.529820] env[68279]: value = "task-2963734" [ 1069.529820] env[68279]: _type = "Task" [ 1069.529820] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.536597] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963734, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.667127] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.696461] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.893s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.697038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.428s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.697131] env[68279]: DEBUG nova.objects.instance [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lazy-loading 'resources' on Instance uuid 594af7a0-1d0a-43ca-947a-8c5614a289d9 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.734571] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.734571] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.739758] env[68279]: INFO nova.scheduler.client.report [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Deleted allocations for instance efda54fe-09a3-4653-b16a-8b3cdd4849c5 [ 1069.823900] env[68279]: DEBUG oslo_concurrency.lockutils [None req-37847f9f-4e49-4ccb-a088-3dcc8403e42a tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.119s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.040347] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 
tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963734, 'name': ReconfigVM_Task, 'duration_secs': 0.509939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.040729] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 50f390b2-99b7-49f3-997f-7d7b50cff9f2/50f390b2-99b7-49f3-997f-7d7b50cff9f2.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1070.042014] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'encryption_secret_uuid': None, 'encryption_format': None, 'size': 0, 'encrypted': False, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'device_name': '/dev/sda', 'device_type': 'disk', 'image_id': '01e502b7-2447-4972-9fe7-fd69f76ef71f'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'}, 'attachment_id': 'c798490e-40ed-43f3-8698-555b264a67e9', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=68279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1070.042238] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1070.042433] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1070.043262] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec0cdb2-cf88-4201-8020-cc65623cbb7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.059166] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b91314-21fb-41c1-8829-31ddf06176d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.084015] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.084236] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8119002b-2069-42fc-a003-6a660112234d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.103190] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1070.103190] env[68279]: value = "task-2963735" [ 1070.103190] env[68279]: _type = "Task" [ 1070.103190] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.112138] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963735, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.200133] env[68279]: DEBUG nova.compute.manager [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-changed-7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.200397] env[68279]: DEBUG nova.compute.manager [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing instance network info cache due to event network-changed-7243843a-c48e-44d5-990f-1de0a9191cbd. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1070.200533] env[68279]: DEBUG oslo_concurrency.lockutils [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.200764] env[68279]: DEBUG oslo_concurrency.lockutils [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.200874] env[68279]: DEBUG nova.network.neutron [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing network info cache for port 7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.242225] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1070.247023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e3d489bf-0f09-4a55-a2d2-f8bc60f80dbb tempest-ServersTestJSON-2033673195 tempest-ServersTestJSON-2033673195-project-member] Lock "efda54fe-09a3-4653-b16a-8b3cdd4849c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.423s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.412817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.413061] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.445038] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ff7593-a945-43f3-8cf9-445938f2108a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.454738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a5c939-7e26-4fb1-8cb2-5017e9864036 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.489364] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfd6948-e6a8-4998-9be3-d372397074e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.498327] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9f27d8-666a-46ac-938c-9929dd5122f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.512908] env[68279]: DEBUG nova.compute.provider_tree [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.615907] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963735, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.767296] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.915323] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1070.929909] env[68279]: DEBUG nova.network.neutron [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updated VIF entry in instance network info cache for port 7243843a-c48e-44d5-990f-1de0a9191cbd. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.930391] env[68279]: DEBUG nova.network.neutron [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.016656] env[68279]: DEBUG nova.scheduler.client.report [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.114681] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963735, 'name': ReconfigVM_Task, 'duration_secs': 0.637401} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.114826] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.120950] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4906165b-9161-4c04-a114-8fed5f159e6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.137874] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1071.137874] env[68279]: value = "task-2963736" [ 1071.137874] env[68279]: _type = "Task" [ 1071.137874] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.146581] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963736, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.364362] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.364619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.364806] env[68279]: DEBUG nova.compute.manager [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Going to confirm migration 4 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1071.433786] env[68279]: DEBUG oslo_concurrency.lockutils [req-332dacdc-d18c-45b8-a003-e3c7f66bc370 req-7c3412b2-6f3f-4e60-8f47-a464b6fcb695 service nova] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.438622] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.521818] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.524375] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.039s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.525440] env[68279]: DEBUG nova.objects.instance [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lazy-loading 'resources' on Instance uuid 0b85c3a6-f413-49b1-9936-222117368995 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.546620] env[68279]: INFO nova.scheduler.client.report [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted allocations for instance 
594af7a0-1d0a-43ca-947a-8c5614a289d9 [ 1071.650876] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963736, 'name': ReconfigVM_Task, 'duration_secs': 0.19424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.652067] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1071.653630] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58ac18c2-079f-45a2-8597-d3089f964d8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.662436] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1071.662436] env[68279]: value = "task-2963737" [ 1071.662436] env[68279]: _type = "Task" [ 1071.662436] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.675870] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963737, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.979271] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.979570] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.979624] env[68279]: DEBUG nova.network.neutron [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.979782] env[68279]: DEBUG nova.objects.instance [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'info_cache' on Instance uuid a96ea5b4-39c5-4a24-873f-54480f876fbf {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.058053] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c0b73286-d24d-4a5a-ba2c-d5482577bece tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "594af7a0-1d0a-43ca-947a-8c5614a289d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.225s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.176074] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963737, 'name': Rename_Task, 'duration_secs': 0.327334} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.179312] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.179749] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2562ee2-dd23-4203-b657-44c58a373959 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.187644] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1072.187644] env[68279]: value = "task-2963738" [ 1072.187644] env[68279]: _type = "Task" [ 1072.187644] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.199999] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.288442] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7f21a5-cc85-4975-99ec-2d1b930d3288 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.297422] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6830d5-fb9d-41f1-871b-05886116309f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.336422] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6ad94e-69fd-4ec1-9e5d-eb0c9ced2db8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.345497] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781cfee9-2403-4ff8-b1b2-e2a8e713dd17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.360602] env[68279]: DEBUG nova.compute.provider_tree [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.700116] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963738, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.863820] env[68279]: DEBUG nova.scheduler.client.report [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.200887] env[68279]: DEBUG oslo_vmware.api [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963738, 'name': PowerOnVM_Task, 'duration_secs': 0.860215} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.203808] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.204035] env[68279]: DEBUG nova.compute.manager [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.204839] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6230b39e-e47a-4451-b1ef-b81c000e7130 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.312156] env[68279]: DEBUG nova.network.neutron [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [{"id": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "address": "fa:16:3e:ad:49:45", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ecf3bb-21", "ovs_interfaceid": "38ecf3bb-21fe-4683-8cc4-40e133bebe1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.369939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.373870] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.525s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.375631] env[68279]: INFO 
nova.compute.claims [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.404621] env[68279]: INFO nova.scheduler.client.report [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Deleted allocations for instance 0b85c3a6-f413-49b1-9936-222117368995 [ 1073.725692] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.815251] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-a96ea5b4-39c5-4a24-873f-54480f876fbf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.816445] env[68279]: DEBUG nova.objects.instance [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'migration_context' on Instance uuid a96ea5b4-39c5-4a24-873f-54480f876fbf {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.915586] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bac58676-db66-40c1-b819-66e2f8c79378 tempest-MultipleCreateTestJSON-937708357 tempest-MultipleCreateTestJSON-937708357-project-member] Lock "0b85c3a6-f413-49b1-9936-222117368995" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.013s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.323098] env[68279]: DEBUG nova.objects.base [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1074.324061] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ee86a2-4270-4b3d-b794-f4d4f9d6c559 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.344581] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e7eda96-63a4-4856-a709-059680ab7bf0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.350823] env[68279]: DEBUG oslo_vmware.api [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1074.350823] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2d34d-8f03-4fbf-c51e-580d6bc76a3f" [ 1074.350823] env[68279]: _type = "Task" [ 1074.350823] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.360426] env[68279]: DEBUG oslo_vmware.api [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2d34d-8f03-4fbf-c51e-580d6bc76a3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.643801] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a7bdcd-4be6-48ad-87a0-501bee0da060 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.653312] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e63d229-f3b4-448c-89af-98a70561d7c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.701796] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce7b7e4-669b-4043-8f48-12b8086c22ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.709395] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b51e1c-1a67-4774-8fd1-90e3f595c6c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.726962] env[68279]: DEBUG nova.compute.provider_tree [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.862850] env[68279]: DEBUG oslo_vmware.api [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c2d34d-8f03-4fbf-c51e-580d6bc76a3f, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.862850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.233381] env[68279]: DEBUG nova.scheduler.client.report [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.736047] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.736047] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1075.738886] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.430s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.739125] env[68279]: DEBUG nova.objects.instance [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lazy-loading 'resources' on Instance uuid daccaa30-1011-4c7d-a668-05f9329ab4d5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.242656] env[68279]: DEBUG nova.compute.utils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1076.248516] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1076.249663] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1076.297794] env[68279]: DEBUG nova.policy [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4815a67fd1c410f82905f7ebe2a4c9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e02764848813428dbe0f88c32ad935ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1076.572281] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd085c8-360c-4830-94ad-02503b810b9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.583911] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d14c338-1d1d-437d-a488-a66b05480539 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.386951] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.391025] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Successfully created port: 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.397393] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c7ee0d-4df8-4ca0-a83c-4403f58d7503 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.399890] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.400125] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.407934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c6ecde-7d37-4af6-b2ec-248aff8770ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.424222] env[68279]: DEBUG nova.compute.provider_tree [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.824310] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "296358b1-e978-409c-8113-587ae8f806c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.824542] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.903836] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Starting 
instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1077.928545] env[68279]: DEBUG nova.scheduler.client.report [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.329587] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1078.399373] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.430400] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.430648] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.430805] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.431046] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor pref 0:0:0 {{(pid=68279) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.431189] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.431338] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.431539] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.431691] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.431882] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.433035] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.433716] env[68279]: DEBUG nova.virt.hardware [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.435696] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d84369-48e1-4dbe-b7b2-51a35da7d809 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.439926] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.701s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.443596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.444108] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.777s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.444336] env[68279]: DEBUG nova.objects.instance [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'resources' on Instance uuid cfaee7e2-6929-4d8c-8614-e19e0055f2fb {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.451367] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d77fbef-d016-4c4e-849f-4d6aaf8d6e52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.469946] env[68279]: INFO nova.scheduler.client.report [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Deleted allocations for instance daccaa30-1011-4c7d-a668-05f9329ab4d5 [ 1078.849679] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.977775] env[68279]: DEBUG oslo_concurrency.lockutils [None req-569cb95d-5e1d-4e12-8c40-76f2f8ac1e9a tempest-ServersNegativeTestJSON-1316735220 tempest-ServersNegativeTestJSON-1316735220-project-member] Lock "daccaa30-1011-4c7d-a668-05f9329ab4d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.147s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.071332] env[68279]: DEBUG nova.compute.manager [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Received event network-vif-plugged-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.071332] env[68279]: DEBUG oslo_concurrency.lockutils [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.071332] env[68279]: DEBUG oslo_concurrency.lockutils [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.071332] env[68279]: DEBUG 
oslo_concurrency.lockutils [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.071332] env[68279]: DEBUG nova.compute.manager [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] No waiting events found dispatching network-vif-plugged-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1079.071332] env[68279]: WARNING nova.compute.manager [req-40f2a984-3027-4217-b6ea-2b2a8d35aeb3 req-99940396-863c-4312-992a-189e42b7d3d8 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Received unexpected event network-vif-plugged-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f for instance with vm_state building and task_state spawning. [ 1079.158991] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Successfully updated port: 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.208073] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a712d43-f689-4fcb-9562-821251692ccc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.218153] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0ddf1f-d160-415f-a61f-b6bed0eff2d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.251544] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fe5825-1d50-4795-b1d4-1bdbcd7b3e20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.261235] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7e23f2-39a1-4758-af18-925acda54a8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.277705] env[68279]: DEBUG nova.compute.provider_tree [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.664842] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.665101] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 
tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.665174] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.781142] env[68279]: DEBUG nova.scheduler.client.report [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.889444] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.889659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.208284] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.288809] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.291176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.524s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.292991] env[68279]: INFO nova.compute.claims [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1080.323472] env[68279]: INFO nova.scheduler.client.report [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted allocations for instance cfaee7e2-6929-4d8c-8614-e19e0055f2fb [ 1080.366609] env[68279]: DEBUG nova.network.neutron [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating instance_info_cache with network_info: [{"id": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "address": "fa:16:3e:16:47:e2", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1665daa6-4f", "ovs_interfaceid": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.391787] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1080.832787] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0325654b-8737-48ef-a209-664cb800f9c7 tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "cfaee7e2-6929-4d8c-8614-e19e0055f2fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.217s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.871388] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.871388] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Instance network_info: |[{"id": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "address": "fa:16:3e:16:47:e2", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1665daa6-4f", "ovs_interfaceid": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1080.871388] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:47:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd19577c9-1b2e-490b-8031-2f278dd3f570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1665daa6-4f83-44e0-8f73-d3ccc3eddb5f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.879068] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.879641] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.879878] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0557d228-e199-4423-91a4-e334f4905fae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.903651] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.903651] env[68279]: value = "task-2963739" [ 1080.903651] env[68279]: _type = "Task" [ 1080.903651] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.914500] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963739, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.916078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.098142] env[68279]: DEBUG nova.compute.manager [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Received event network-changed-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.098142] env[68279]: DEBUG nova.compute.manager [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Refreshing instance network info cache due to event network-changed-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.098142] env[68279]: DEBUG oslo_concurrency.lockutils [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] Acquiring lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.099284] env[68279]: DEBUG oslo_concurrency.lockutils [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] Acquired lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.099284] env[68279]: DEBUG nova.network.neutron [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Refreshing network info cache for port 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.316384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "fe92e176-222c-4c46-a254-1c12e21c68d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.316639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.316885] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.317103] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.317280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.321321] env[68279]: INFO nova.compute.manager [None 
req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Terminating instance [ 1081.416747] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963739, 'name': CreateVM_Task, 'duration_secs': 0.347704} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.420036] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1081.421681] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.422234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.422582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1081.423192] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93322850-fd25-4e0c-9b3e-ccfb24f5e313 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.430307] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1081.430307] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222a14f-aaea-13b3-c85f-453cbb9978d1" [ 1081.430307] env[68279]: _type = "Task" [ 1081.430307] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.440697] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222a14f-aaea-13b3-c85f-453cbb9978d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.658634] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e994d7aa-7c0f-4dde-ad06-9164b5588daa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.667420] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3633e0f2-41bc-43f8-9c76-22e9671c2dc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.704732] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094eb51c-0c99-410a-ba71-4f2a40a25768 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.711729] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a676e6d2-b9a2-4e50-8c59-cdfcb66ef8ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.725536] env[68279]: DEBUG nova.compute.provider_tree [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.827705] env[68279]: DEBUG nova.compute.manager [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1081.827923] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.828862] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4b7ff5-cf1a-4ab2-adde-60f3338f0b5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.838595] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.838922] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef68048f-33d4-482b-ae6b-1d6b66a60043 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.840806] env[68279]: DEBUG nova.network.neutron [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updated VIF entry in instance network info cache for port 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.841174] env[68279]: DEBUG nova.network.neutron [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating instance_info_cache with network_info: [{"id": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "address": "fa:16:3e:16:47:e2", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1665daa6-4f", "ovs_interfaceid": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.847882] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1081.847882] env[68279]: value = "task-2963740" [ 1081.847882] env[68279]: _type = "Task" [ 1081.847882] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.857502] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.940245] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5222a14f-aaea-13b3-c85f-453cbb9978d1, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.940593] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.941023] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.941207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.941354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.941492] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.941761] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e52ac2c1-1c1d-412d-824c-dc315ebd6c1c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.951449] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.951629] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.952388] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe517a62-27c4-48ab-bb3b-9c216a8e1f94 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.957945] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1081.957945] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529fa727-b25a-cd07-ba3a-20968e305f04" [ 1081.957945] env[68279]: _type = "Task" [ 1081.957945] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.966473] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529fa727-b25a-cd07-ba3a-20968e305f04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.231117] env[68279]: DEBUG nova.scheduler.client.report [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.344387] env[68279]: DEBUG oslo_concurrency.lockutils [req-14412f82-a053-492c-83ac-305d13032e9a req-b1266a42-ffd9-4ff9-8e62-8e9b801d1e66 service nova] Releasing lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.357934] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963740, 'name': PowerOffVM_Task, 'duration_secs': 0.285327} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.358866] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.358866] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.358866] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a3aa465-316b-4e85-92c4-d9c421def64b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.438533] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.438754] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.438945] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleting the datastore file [datastore1] fe92e176-222c-4c46-a254-1c12e21c68d0 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.439226] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2771bb2e-ba65-4c6d-9673-3eb48dae52d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.446910] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for the task: (returnval){ [ 1082.446910] env[68279]: value = "task-2963742" [ 1082.446910] env[68279]: _type = "Task" [ 1082.446910] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.454971] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963742, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.466655] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529fa727-b25a-cd07-ba3a-20968e305f04, 'name': SearchDatastore_Task, 'duration_secs': 0.010317} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.467436] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37891f0c-432b-4e24-a535-4b9fe42409c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.472359] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1082.472359] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5266ae32-3350-fcb1-48f9-962bbee42d53" [ 1082.472359] env[68279]: _type = "Task" [ 1082.472359] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.479780] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5266ae32-3350-fcb1-48f9-962bbee42d53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.735780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.736392] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1082.740596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.301s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.741424] env[68279]: INFO nova.compute.claims [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1082.957603] env[68279]: DEBUG oslo_vmware.api [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Task: {'id': task-2963742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155738} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.957969] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.958084] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.958270] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.958439] env[68279]: INFO nova.compute.manager [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1082.958671] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.958869] env[68279]: DEBUG nova.compute.manager [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1082.958973] env[68279]: DEBUG nova.network.neutron [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1082.982037] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5266ae32-3350-fcb1-48f9-962bbee42d53, 'name': SearchDatastore_Task, 'duration_secs': 0.008688} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.982317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.982571] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e3da334a-1dfc-41d8-8ba8-aabe53924bdc/e3da334a-1dfc-41d8-8ba8-aabe53924bdc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.982812] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dbe2d27-992c-4ef4-bb49-36686a126cc8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.989619] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1082.989619] env[68279]: value = "task-2963743" [ 1082.989619] env[68279]: _type = "Task" [ 1082.989619] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.997666] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963743, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.240161] env[68279]: DEBUG nova.compute.manager [req-97a1d6f5-bb4a-4a4e-a53e-8582faf006df req-e1ec187f-e3f7-4dc4-a7b4-61d90bfbfe5f service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Received event network-vif-deleted-d8067a6f-39fd-42be-8f8e-23d5dea92c8b {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.240556] env[68279]: INFO nova.compute.manager [req-97a1d6f5-bb4a-4a4e-a53e-8582faf006df req-e1ec187f-e3f7-4dc4-a7b4-61d90bfbfe5f service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Neutron deleted interface d8067a6f-39fd-42be-8f8e-23d5dea92c8b; detaching it from the instance and deleting it from the info cache [ 1083.241024] env[68279]: DEBUG nova.network.neutron [req-97a1d6f5-bb4a-4a4e-a53e-8582faf006df req-e1ec187f-e3f7-4dc4-a7b4-61d90bfbfe5f service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.244913] env[68279]: DEBUG nova.compute.utils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.248388] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.248388] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.299875] env[68279]: DEBUG nova.policy [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.500066] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478487} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.500285] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e3da334a-1dfc-41d8-8ba8-aabe53924bdc/e3da334a-1dfc-41d8-8ba8-aabe53924bdc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.500493] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.500796] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dadede4-3bb9-4df9-bd4f-48ee8c9b17eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.509175] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1083.509175] env[68279]: value = "task-2963744" [ 1083.509175] env[68279]: _type = "Task" [ 1083.509175] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.519423] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.610621] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Successfully created port: 45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1083.713800] env[68279]: DEBUG nova.network.neutron [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.748080] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d8cf83b-1e7f-4ab8-b4c1-f620a9dea31b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.753527] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1083.766810] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9adf5ff-ca91-479f-94a9-bbb1d92e944e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.810737] env[68279]: DEBUG nova.compute.manager [req-97a1d6f5-bb4a-4a4e-a53e-8582faf006df req-e1ec187f-e3f7-4dc4-a7b4-61d90bfbfe5f service nova] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Detach interface failed, port_id=d8067a6f-39fd-42be-8f8e-23d5dea92c8b, reason: Instance fe92e176-222c-4c46-a254-1c12e21c68d0 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1084.014806] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bb7e01-368d-4c80-90e0-81da33224942 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.026043] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32769ae8-8225-4b5b-8214-82d009b9dc4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.029363] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069289} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.029703] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.030730] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd67ada-f024-4edf-8d69-050f913b265b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.061488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18923d95-a448-430b-be83-3e7ea902aed1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.081156] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] e3da334a-1dfc-41d8-8ba8-aabe53924bdc/e3da334a-1dfc-41d8-8ba8-aabe53924bdc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.082164] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-571b833e-aef9-45ad-a4b8-9a5eba86a5e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.102795] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ab96d9-8fb5-4f82-9518-2964cf11ce67 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.109021] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1084.109021] env[68279]: value = "task-2963745" [ 1084.109021] env[68279]: _type = "Task" [ 1084.109021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.117053] env[68279]: DEBUG nova.compute.provider_tree [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.127653] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.216293] env[68279]: INFO nova.compute.manager [-] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Took 1.26 seconds to deallocate network for instance. [ 1084.622749] env[68279]: DEBUG nova.scheduler.client.report [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.629246] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963745, 'name': ReconfigVM_Task, 'duration_secs': 0.268534} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.629575] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfigured VM instance instance-00000065 to attach disk [datastore1] e3da334a-1dfc-41d8-8ba8-aabe53924bdc/e3da334a-1dfc-41d8-8ba8-aabe53924bdc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.630166] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3356355b-bda5-4f97-8901-889b7a820d79 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.637646] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1084.637646] env[68279]: value = "task-2963746" [ 1084.637646] env[68279]: _type = "Task" [ 1084.637646] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.648218] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963746, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.722906] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.763366] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1084.790070] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1084.790325] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.790480] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1084.790660] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.790807] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1084.790956] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1084.791224] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1084.791391] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1084.791560] env[68279]: DEBUG nova.virt.hardware [None 
req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1084.791724] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1084.791895] env[68279]: DEBUG nova.virt.hardware [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1084.792805] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b55433-0076-45c4-8b6a-1d77a23a2ccc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.801982] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10667f9-efe6-45f1-80dd-f78ba8422501 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.109222] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Successfully updated port: 45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.130663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.131289] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.134168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.409s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.134358] env[68279]: DEBUG nova.objects.instance [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1085.150038] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963746, 'name': Rename_Task, 'duration_secs': 0.153047} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.150515] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.150515] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d2a9f12-2aba-4451-aa8e-df03432a31f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.158203] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1085.158203] env[68279]: value = "task-2963747" [ 1085.158203] env[68279]: _type = "Task" [ 1085.158203] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.168769] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963747, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.265270] env[68279]: DEBUG nova.compute.manager [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Received event network-vif-plugged-45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.265505] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Acquiring lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.265719] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.265885] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.266066] env[68279]: DEBUG nova.compute.manager [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] No waiting events found dispatching network-vif-plugged-45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.266235] env[68279]: WARNING nova.compute.manager [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Received unexpected event network-vif-plugged-45600165-3cae-4f5f-a5ab-9c71c5d7035d for instance with vm_state building and task_state spawning. [ 1085.266393] env[68279]: DEBUG nova.compute.manager [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Received event network-changed-45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.266546] env[68279]: DEBUG nova.compute.manager [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Refreshing instance network info cache due to event network-changed-45600165-3cae-4f5f-a5ab-9c71c5d7035d. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1085.266726] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Acquiring lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.266861] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Acquired lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.267028] env[68279]: DEBUG nova.network.neutron [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Refreshing network info cache for port 45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1085.571894] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.572183] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.613564] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.638361] env[68279]: DEBUG nova.compute.utils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1085.643077] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1085.643077] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1085.667396] env[68279]: DEBUG oslo_vmware.api [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963747, 'name': PowerOnVM_Task, 'duration_secs': 0.495096} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.667647] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.667846] env[68279]: INFO nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Took 7.27 seconds to spawn the instance on the hypervisor. [ 1085.668034] env[68279]: DEBUG nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.668770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c49b41-84c1-46fe-9248-8eb9a78905cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.680066] env[68279]: DEBUG nova.policy [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1085.797390] env[68279]: DEBUG nova.network.neutron [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.864232] env[68279]: DEBUG nova.network.neutron [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.921783] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Successfully created port: 87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.079375] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.079375] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.079375] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.079375] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.079375] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.079526] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.080365] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1086.080365] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.149986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-bbe5982d-d7db-4492-a59b-251d004b7a98 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.150325] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.156130] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 11.293s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.189359] env[68279]: INFO nova.compute.manager [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Took 30.36 seconds to build instance. 
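[Editor's note — illustrative sketch, not part of the captured log.] The records above and below repeatedly show the oslo.vmware call/poll cycle: an asynchronous vSphere method returns a Task managed object (e.g. 'value = "task-2963747"'), which the driver polls until it logs "progress is 0%" and then "completed successfully". A minimal standalone reproduction of that pattern, with placeholder host, credentials, and managed-object ID (none of these values come from this log, and the exact constructor arguments are an assumption about oslo.vmware), looks roughly like this:

    # Hedged sketch of the oslo.vmware call/poll cycle seen in the log.
    # Host, credentials and the VM managed-object ID are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.invalid',    # placeholder vCenter endpoint
        'svc-user', 'svc-password',   # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)       # how often Task.info is polled

    # Build a managed object reference for a VM (placeholder ID).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Asynchronous vSphere method: returns a Task, analogous to the
    # "Invoking VirtualMachine.PowerOnVM_Task" records in the log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls the task (the "progress is 0%" / "completed successfully"
    # records) and raises on an error state.
    session.wait_for_task(task)

The "Acquiring lock ... / Lock ... acquired by ... / released" triplets interleaved with these records are the standard oslo_concurrency.lockutils tracing around the same operations; the raw log continues below.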
[ 1086.367491] env[68279]: DEBUG oslo_concurrency.lockutils [req-5145c5d0-5908-41fe-bd0d-e239d94feaa7 req-5c90f66b-186f-4409-9ba0-80400304676d service nova] Releasing lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.367829] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.367995] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.582569] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.690012] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3187320f-1d6e-41c8-8372-a5f5d576c400 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.871s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.834668] env[68279]: DEBUG nova.compute.manager [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Received event network-changed-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.834886] env[68279]: DEBUG nova.compute.manager [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Refreshing instance network info cache due to event network-changed-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1086.835145] env[68279]: DEBUG oslo_concurrency.lockutils [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] Acquiring lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.835383] env[68279]: DEBUG oslo_concurrency.lockutils [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] Acquired lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.835512] env[68279]: DEBUG nova.network.neutron [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Refreshing network info cache for port 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.889197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af942266-001c-4930-9fbf-0f72b3cece7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.898102] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2a72ed-6af6-436c-8273-df649465c940 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.901773] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1086.936013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0c90d5-523d-40ba-8ac0-6e1146a646bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.944893] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d54361e-16b1-4b6e-9d34-a6a33125d797 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.959532] env[68279]: DEBUG nova.compute.provider_tree [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.054064] env[68279]: DEBUG nova.network.neutron [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Updating instance_info_cache with network_info: [{"id": "45600165-3cae-4f5f-a5ab-9c71c5d7035d", "address": "fa:16:3e:19:7c:24", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45600165-3c", "ovs_interfaceid": "45600165-3cae-4f5f-a5ab-9c71c5d7035d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.167232] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.195217] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.195514] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.195682] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.195862] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.196020] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.196166] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.196375] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.196631] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.196884] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 
tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.197570] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.197798] env[68279]: DEBUG nova.virt.hardware [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.198988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3085befb-d46b-48ed-ae3b-358593f632aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.208019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fe2a1f-a0fc-42eb-83ee-c6c22c22c85c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.354797] env[68279]: DEBUG nova.compute.manager [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Received event network-vif-plugged-87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.355081] env[68279]: DEBUG oslo_concurrency.lockutils [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] Acquiring lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.355302] env[68279]: DEBUG oslo_concurrency.lockutils [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.355488] env[68279]: DEBUG oslo_concurrency.lockutils [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.355714] env[68279]: DEBUG nova.compute.manager [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] No waiting events found dispatching network-vif-plugged-87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1087.355816] env[68279]: WARNING nova.compute.manager [req-73a2cd61-b319-4b33-8dd9-b0274978f450 req-9727ca0d-6187-4420-a486-5893714add2f service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Received 
unexpected event network-vif-plugged-87277960-0dc5-4e95-a4ac-6542504f83ee for instance with vm_state building and task_state spawning. [ 1087.437988] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Successfully updated port: 87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1087.462674] env[68279]: DEBUG nova.scheduler.client.report [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.558078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.558415] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance network_info: |[{"id": "45600165-3cae-4f5f-a5ab-9c71c5d7035d", "address": "fa:16:3e:19:7c:24", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45600165-3c", "ovs_interfaceid": "45600165-3cae-4f5f-a5ab-9c71c5d7035d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1087.558832] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:7c:24', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45600165-3cae-4f5f-a5ab-9c71c5d7035d', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.567220] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.567442] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.567665] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53cf3a32-3dc8-4f5e-b0a1-e883cecdca2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.593484] env[68279]: DEBUG nova.network.neutron [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updated VIF entry in instance network info cache for port 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.593745] env[68279]: DEBUG nova.network.neutron [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating instance_info_cache with network_info: [{"id": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "address": "fa:16:3e:16:47:e2", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1665daa6-4f", "ovs_interfaceid": "1665daa6-4f83-44e0-8f73-d3ccc3eddb5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.601946] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.601946] env[68279]: value = "task-2963748" [ 1087.601946] env[68279]: _type = "Task" [ 1087.601946] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.940027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.940217] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.940426] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.096770] env[68279]: DEBUG oslo_concurrency.lockutils [req-df770873-13b4-448c-85e3-3eaf70574fe6 req-2d7a1a21-3a24-45a5-a521-1a9b243d741e service nova] Releasing lock "refresh_cache-e3da334a-1dfc-41d8-8ba8-aabe53924bdc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.112582] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963748, 'name': CreateVM_Task, 'duration_secs': 0.375143} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.112741] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.113490] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.113659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.113986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.114252] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c169107-e24c-417d-96e4-d33895c76e28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.119237] env[68279]: DEBUG oslo_vmware.api [None 
req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1088.119237] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522bd10c-328f-f7c4-1476-ee691e444a44" [ 1088.119237] env[68279]: _type = "Task" [ 1088.119237] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.127331] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522bd10c-328f-f7c4-1476-ee691e444a44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.473980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.318s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.477028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.033s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.478313] env[68279]: INFO nova.compute.claims [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1088.486118] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1088.608773] env[68279]: DEBUG nova.network.neutron [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Updating instance_info_cache with network_info: [{"id": "87277960-0dc5-4e95-a4ac-6542504f83ee", "address": "fa:16:3e:18:d3:3a", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87277960-0d", "ovs_interfaceid": "87277960-0dc5-4e95-a4ac-6542504f83ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.631998] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522bd10c-328f-f7c4-1476-ee691e444a44, 'name': SearchDatastore_Task, 'duration_secs': 0.010844} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.632555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.632793] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.633032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.633187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.633363] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.633614] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18d66b5a-c5f8-4c2c-811c-0a45097e542e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.642736] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.642901] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.643683] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0200486-af42-4c85-a51a-e76199b14f5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.649114] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1088.649114] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c6c21-1f49-acf3-6c4a-1ebf02325b45" [ 1088.649114] env[68279]: _type = "Task" [ 1088.649114] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.659207] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c6c21-1f49-acf3-6c4a-1ebf02325b45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.041038] env[68279]: INFO nova.scheduler.client.report [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocation for migration 70f75b68-4652-4b55-a1d2-21a134e0012d [ 1089.110982] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.111410] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Instance network_info: |[{"id": "87277960-0dc5-4e95-a4ac-6542504f83ee", "address": "fa:16:3e:18:d3:3a", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87277960-0d", "ovs_interfaceid": "87277960-0dc5-4e95-a4ac-6542504f83ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1089.111817] env[68279]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:d3:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87277960-0dc5-4e95-a4ac-6542504f83ee', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.119955] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.120200] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1089.120702] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98088811-40cc-4130-8f4d-cea0a90024b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.141984] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.141984] env[68279]: value = "task-2963749" [ 1089.141984] env[68279]: _type = "Task" [ 1089.141984] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.152485] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963749, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.161539] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524c6c21-1f49-acf3-6c4a-1ebf02325b45, 'name': SearchDatastore_Task, 'duration_secs': 0.009393} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.162375] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-419d110f-a2c7-46c4-b69c-18700f864dfb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.169234] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1089.169234] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0c8c5-2811-c98d-f978-a39697acc638" [ 1089.169234] env[68279]: _type = "Task" [ 1089.169234] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.178124] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0c8c5-2811-c98d-f978-a39697acc638, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.386276] env[68279]: DEBUG nova.compute.manager [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Received event network-changed-87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.386472] env[68279]: DEBUG nova.compute.manager [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Refreshing instance network info cache due to event network-changed-87277960-0dc5-4e95-a4ac-6542504f83ee. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1089.386696] env[68279]: DEBUG oslo_concurrency.lockutils [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] Acquiring lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.386975] env[68279]: DEBUG oslo_concurrency.lockutils [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] Acquired lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.387166] env[68279]: DEBUG nova.network.neutron [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Refreshing network info cache for port 87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.545956] env[68279]: DEBUG oslo_concurrency.lockutils [None req-153a8cac-4b87-4720-9346-23c2f3f031e1 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.181s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.653048] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963749, 'name': CreateVM_Task, 'duration_secs': 0.345183} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.653229] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.653953] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.654234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.654464] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1089.654719] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5273a37d-3082-48b7-81d0-402eec5d26a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.660464] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1089.660464] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252b188-97c9-023e-e9b8-d7a26ce611cc" [ 1089.660464] env[68279]: _type = "Task" [ 1089.660464] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.673358] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252b188-97c9-023e-e9b8-d7a26ce611cc, 'name': SearchDatastore_Task, 'duration_secs': 0.010618} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.676429] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.676671] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.676884] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.686695] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f0c8c5-2811-c98d-f978-a39697acc638, 'name': SearchDatastore_Task, 'duration_secs': 0.011554} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.686953] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.687244] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5/6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.687555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.687745] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.687977] env[68279]: DEBUG oslo_vmware.service 
[-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51b0ae6b-c6f9-46d3-a6b0-2d77a0fc723d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.691148] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67b897a7-ab68-44e5-bb1f-acad2f33c2f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.694780] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28b312b-d6f1-42b1-be6e-74e86cdc6531 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.705957] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1da556-666a-4985-baa1-b107af3d9086 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.709150] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1089.709150] env[68279]: value = "task-2963750" [ 1089.709150] env[68279]: _type = "Task" [ 1089.709150] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.710447] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.710629] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.714995] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d2fbb3-9d61-4958-b0ad-7abb82115b68 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.742623] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8d3d83-53ff-4ea7-ab88-26a16e7a17ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.749443] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963750, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.749621] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1089.749621] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524aa701-33ce-c175-6243-132701d89b5d" [ 1089.749621] env[68279]: _type = "Task" [ 1089.749621] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.756994] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b5c729-b1ac-4d9a-b28f-ab1b98cc10e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.764457] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524aa701-33ce-c175-6243-132701d89b5d, 'name': SearchDatastore_Task, 'duration_secs': 0.010501} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.765666] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6860dd40-89e1-481b-9ead-cb20765e5b3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.775964] env[68279]: DEBUG nova.compute.provider_tree [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.783836] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1089.783836] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52422f7a-dd8d-eb94-e8e0-9636ba566266" [ 1089.783836] env[68279]: _type = "Task" [ 1089.783836] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.792530] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52422f7a-dd8d-eb94-e8e0-9636ba566266, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.220753] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476675} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.223892] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5/6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.223892] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.223892] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aab4e6f5-a4d9-4ffa-9907-8feb5fa3d3ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.229485] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1090.229485] env[68279]: value = "task-2963751" [ 1090.229485] env[68279]: _type = "Task" [ 1090.229485] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.236740] env[68279]: DEBUG nova.network.neutron [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Updated VIF entry in instance network info cache for port 87277960-0dc5-4e95-a4ac-6542504f83ee. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.237751] env[68279]: DEBUG nova.network.neutron [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Updating instance_info_cache with network_info: [{"id": "87277960-0dc5-4e95-a4ac-6542504f83ee", "address": "fa:16:3e:18:d3:3a", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87277960-0d", "ovs_interfaceid": "87277960-0dc5-4e95-a4ac-6542504f83ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.241953] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963751, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.279542] env[68279]: DEBUG nova.scheduler.client.report [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1090.294621] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52422f7a-dd8d-eb94-e8e0-9636ba566266, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.294902] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.295781] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 66b2ce98-9a8a-4344-bd7d-80b7fa001344/66b2ce98-9a8a-4344-bd7d-80b7fa001344.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.296173] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0628f26-6697-4a7f-9f41-4d9eb3240839 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.304801] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1090.304801] env[68279]: value = "task-2963752" [ 1090.304801] env[68279]: _type = "Task" [ 1090.304801] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.313856] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.346981] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.347259] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.347622] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.348606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.348606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.350552] env[68279]: INFO nova.compute.manager [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Terminating instance [ 1090.740887] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963751, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0653} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.741245] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.742092] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d2939a-0a86-47d9-807f-4c1f7bc54b41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.744966] env[68279]: DEBUG oslo_concurrency.lockutils [req-65bca2d7-fbe0-4a3c-ba70-4d097c96be73 req-f2db11b3-fbbe-41b6-95c8-5d5f13ef23a2 service nova] Releasing lock "refresh_cache-66b2ce98-9a8a-4344-bd7d-80b7fa001344" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.765622] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5/6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.765880] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72566a99-1817-4cca-bc61-9353d50ea83b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.783988] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.784603] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.789086] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.940s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.790447] env[68279]: INFO nova.compute.claims [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.793645] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1090.793645] env[68279]: value = "task-2963753" [ 1090.793645] env[68279]: _type = "Task" [ 1090.793645] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.802276] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963753, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.814727] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434549} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.815132] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 66b2ce98-9a8a-4344-bd7d-80b7fa001344/66b2ce98-9a8a-4344-bd7d-80b7fa001344.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.815300] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.815506] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-897dfb42-2471-4e6d-8773-a01cb739f9fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.823818] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1090.823818] env[68279]: value = "task-2963754" [ 1090.823818] env[68279]: _type = "Task" [ 1090.823818] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.835222] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963754, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.853638] env[68279]: DEBUG nova.compute.manager [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.853931] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.854863] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37080bc-c58c-4aec-8ed4-7e6f6b63f595 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.863836] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.864158] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79b18bf0-1d80-4340-ad5f-ef8083ff3cb8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.871569] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1090.871569] env[68279]: value = "task-2963755" [ 1090.871569] env[68279]: _type = "Task" [ 1090.871569] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.882826] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963755, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.943049] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "778efb81-2562-4d55-ace0-09722d92fa5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.943332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.289942] env[68279]: DEBUG nova.compute.utils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1091.292060] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1091.292281] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1091.308230] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963753, 'name': ReconfigVM_Task, 'duration_secs': 0.274284} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.308565] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5/6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.309469] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55a1cc24-72f5-4ab4-a8a6-e570304899c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.317356] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1091.317356] env[68279]: value = "task-2963756" [ 1091.317356] env[68279]: _type = "Task" [ 1091.317356] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.328073] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963756, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.338078] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067316} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.338395] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.339448] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aa9491-f104-499d-b7eb-ee4da4450025 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.364780] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 66b2ce98-9a8a-4344-bd7d-80b7fa001344/66b2ce98-9a8a-4344-bd7d-80b7fa001344.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.366580] env[68279]: DEBUG nova.policy [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e9d9c1927948f5bb8f42235b09f008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d7a90a39b864e3e985b3b828c3fd363', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1091.368129] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1276ab7b-166c-48dc-8d70-0623e3227bec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.394113] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963755, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.394113] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1091.394113] env[68279]: value = "task-2963757" [ 1091.394113] env[68279]: _type = "Task" [ 1091.394113] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.402730] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963757, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.445907] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1091.723726] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Successfully created port: 80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.801088] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.828816] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963756, 'name': Rename_Task, 'duration_secs': 0.171808} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.828996] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.829233] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ead1f16-36b1-4635-af28-1214df4751d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.837319] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1091.837319] env[68279]: value = "task-2963758" [ 1091.837319] env[68279]: _type = "Task" [ 1091.837319] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.845689] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.898638] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963755, 'name': PowerOffVM_Task, 'duration_secs': 0.657309} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.903019] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1091.903019] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.903019] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd00d6ff-6d38-4cbd-85f6-a31256cf3add {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.911058] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963757, 'name': ReconfigVM_Task, 'duration_secs': 0.315299} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.911992] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 66b2ce98-9a8a-4344-bd7d-80b7fa001344/66b2ce98-9a8a-4344-bd7d-80b7fa001344.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.912462] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d072162b-7788-4f49-b13f-a9dfd91d47a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.922283] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1091.922283] env[68279]: value = "task-2963760" [ 1091.922283] env[68279]: _type = "Task" [ 1091.922283] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.933228] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963760, 'name': Rename_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.972581] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.976401] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.976718] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.976961] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore2] a96ea5b4-39c5-4a24-873f-54480f876fbf {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.977634] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef9b9092-a42c-4b30-9cde-c492807094a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.987508] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1091.987508] env[68279]: value = "task-2963761" [ 1091.987508] env[68279]: _type = "Task" [ 1091.987508] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.998055] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963761, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.080037] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca031075-d3e8-4dda-ba27-7ce4700c30c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.087633] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61193f9e-c9f6-4ec0-83cd-99a8170ae801 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.119418] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72119bfa-2104-49e5-ac70-863d85dc5a71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.128081] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d952ce2-c592-4460-82a6-3f40766cbbe9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.143375] env[68279]: DEBUG nova.compute.provider_tree [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.349709] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963758, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.434074] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963760, 'name': Rename_Task, 'duration_secs': 0.264313} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.434383] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.434639] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa685d8d-4139-405a-8e50-28632a93f846 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.441940] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1092.441940] env[68279]: value = "task-2963762" [ 1092.441940] env[68279]: _type = "Task" [ 1092.441940] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.450482] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.498448] env[68279]: DEBUG oslo_vmware.api [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24999} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.498695] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.498880] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.499096] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.499249] env[68279]: INFO nova.compute.manager [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1092.499509] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.500063] env[68279]: DEBUG nova.compute.manager [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.500063] env[68279]: DEBUG nova.network.neutron [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.649713] env[68279]: DEBUG nova.scheduler.client.report [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1092.812709] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.846179] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.846685] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.846972] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.847362] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 
tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.847702] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.851018] env[68279]: DEBUG nova.virt.hardware [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.851018] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9135be8f-c056-4af7-929f-cf541319a4cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.860957] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963758, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.862563] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1179998-181b-49ab-aab5-2c96f96b6f8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.956547] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963762, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.015189] env[68279]: DEBUG nova.compute.manager [req-ca8b2f5b-20cd-40f0-acdb-9dd51f0de82d req-3b7c3988-c23d-4d83-b9e1-7da933e6eefd service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Received event network-vif-deleted-38ecf3bb-21fe-4683-8cc4-40e133bebe1f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.015189] env[68279]: INFO nova.compute.manager [req-ca8b2f5b-20cd-40f0-acdb-9dd51f0de82d req-3b7c3988-c23d-4d83-b9e1-7da933e6eefd service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Neutron deleted interface 38ecf3bb-21fe-4683-8cc4-40e133bebe1f; detaching it from the instance and deleting it from the info cache [ 1093.016171] env[68279]: DEBUG nova.network.neutron [req-ca8b2f5b-20cd-40f0-acdb-9dd51f0de82d req-3b7c3988-c23d-4d83-b9e1-7da933e6eefd service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.158795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.158795] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1093.164890] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.249s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.167261] env[68279]: INFO nova.compute.claims [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.285033] env[68279]: DEBUG nova.network.neutron [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.316347] env[68279]: DEBUG nova.compute.manager [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Received event network-vif-plugged-80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.316677] env[68279]: DEBUG oslo_concurrency.lockutils [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.316995] env[68279]: DEBUG oslo_concurrency.lockutils [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.317647] env[68279]: DEBUG oslo_concurrency.lockutils [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.318962] env[68279]: DEBUG nova.compute.manager [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] No waiting events found dispatching network-vif-plugged-80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1093.318962] env[68279]: WARNING nova.compute.manager [req-93e5a895-cd85-4a18-94d5-a26a5c2c4d6d req-c2854735-36fa-46ab-919e-75c82afdc06d service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Received unexpected event network-vif-plugged-80e209dd-e4b0-4331-87a6-92e23bdfa270 for instance with vm_state building and task_state spawning.
[ 1093.349779] env[68279]: DEBUG oslo_vmware.api [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963758, 'name': PowerOnVM_Task, 'duration_secs': 1.196207} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.353020] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.353020] env[68279]: INFO nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1093.353020] env[68279]: DEBUG nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.353020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c69090b-7e84-4a8c-b21a-01afd99c2a74 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.413992] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Successfully updated port: 80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.454023] env[68279]: DEBUG oslo_vmware.api [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963762, 'name': PowerOnVM_Task, 'duration_secs': 0.692987} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.455038] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.455603] env[68279]: INFO nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Took 6.29 seconds to spawn the instance on the hypervisor. 
[ 1093.455603] env[68279]: DEBUG nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.456200] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2acbeb6-0fbe-4946-8ebc-f8bbd48e4830 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.518917] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1173b902-0b64-4e34-a4c3-0df29f9f8fa9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.531533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7142b3a3-d651-4ad6-baa9-4d32f26b1bb7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.564070] env[68279]: DEBUG nova.compute.manager [req-ca8b2f5b-20cd-40f0-acdb-9dd51f0de82d req-3b7c3988-c23d-4d83-b9e1-7da933e6eefd service nova] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Detach interface failed, port_id=38ecf3bb-21fe-4683-8cc4-40e133bebe1f, reason: Instance a96ea5b4-39c5-4a24-873f-54480f876fbf could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.673756] env[68279]: DEBUG nova.compute.utils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1093.677509] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1093.677705] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1093.720724] env[68279]: DEBUG nova.policy [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4120beef6f2447ca79e7b781203e61e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebe5dfa4da04412980adbfecc868186a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1093.791524] env[68279]: INFO nova.compute.manager [-] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Took 1.29 seconds to deallocate network for instance. 
[ 1093.878034] env[68279]: INFO nova.compute.manager [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Took 23.13 seconds to build instance. [ 1093.919295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.919371] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.919491] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.975570] env[68279]: INFO nova.compute.manager [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Took 22.55 seconds to build instance. [ 1094.178139] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1094.187027] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Successfully created port: 57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.256445] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.256716] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.256892] env[68279]: INFO nova.compute.manager [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Shelving [ 1094.298573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.379500] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ac7abef7-6dac-4258-ade7-3e9991d24e6e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.645s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.418183] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c49a30-f80e-488b-b029-c842fa437991 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.438386] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fc9322-6c8d-43a4-a5ef-1c3e13849087 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.477235] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9d340aa8-7bbc-4615-9603-762a785f7f21 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.064s {{(pid=68279) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.478480] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.480844] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13070c16-d58a-4a8e-85ec-d47149cf65ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.490855] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d160a1d-abf4-4a03-aa3b-33d3a672d4e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.509781] env[68279]: DEBUG nova.compute.provider_tree [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.636719] env[68279]: DEBUG nova.network.neutron [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.014186] env[68279]: DEBUG nova.scheduler.client.report [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.057407] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff6d95-5e4b-4111-a896-551c896f1aed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.064105] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Suspending the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1095.064358] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9305eee9-f1a8-48c6-8882-619f930da2d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.071535] env[68279]: DEBUG oslo_vmware.api [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1095.071535] env[68279]: value = "task-2963763" [ 1095.071535] env[68279]: _type = "Task" [ 1095.071535] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.079871] env[68279]: DEBUG oslo_vmware.api [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963763, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.140046] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.140046] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Instance network_info: |[{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1095.140684] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:6b:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80e209dd-e4b0-4331-87a6-92e23bdfa270', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.148563] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating folder: Project (7d7a90a39b864e3e985b3b828c3fd363). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.148890] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa08c0a9-c0f3-4df7-b817-f3ccdfc8e16f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.163342] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created folder: Project (7d7a90a39b864e3e985b3b828c3fd363) in parent group-v594445. [ 1095.163561] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating folder: Instances. Parent ref: group-v594725. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.163849] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24d728b4-56f9-4ab0-b33f-076c14702c43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.176776] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created folder: Instances in parent group-v594725. [ 1095.177229] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1095.177558] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.177839] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-252e8aa3-511e-444c-a381-c29ae2588f5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.194157] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1095.203905] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.203905] env[68279]: value = "task-2963766" [ 1095.203905] env[68279]: _type = "Task" [ 1095.203905] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.215034] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963766, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.224066] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1095.224378] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.224575] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1095.224885] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.225065] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1095.225285] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1095.225544] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1095.225714] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1095.225885] 
env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1095.226087] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1095.226309] env[68279]: DEBUG nova.virt.hardware [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1095.227310] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38b3445-6881-486e-92d0-31853f9e7169 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.237442] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975b2cbd-6537-4185-a742-998b33ff50a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.256606] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.256873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.257104] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.257287] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.257461] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock
"66b2ce98-9a8a-4344-bd7d-80b7fa001344-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.259923] env[68279]: INFO nova.compute.manager [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Terminating instance [ 1095.268368] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.268616] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb5fca33-df0f-46d8-b956-a04086c1c6f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.276844] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1095.276844] env[68279]: value = "task-2963767" [ 1095.276844] env[68279]: _type = "Task" [ 1095.276844] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.286363] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.351378] env[68279]: DEBUG nova.compute.manager [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Received event network-changed-80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.351378] env[68279]: DEBUG nova.compute.manager [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Refreshing instance network info cache due to event network-changed-80e209dd-e4b0-4331-87a6-92e23bdfa270. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1095.351378] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.351378] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.351378] env[68279]: DEBUG nova.network.neutron [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Refreshing network info cache for port 80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.519224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.520177] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1095.524234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.801s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.525184] env[68279]: DEBUG nova.objects.instance [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lazy-loading 'resources' on Instance uuid fe92e176-222c-4c46-a254-1c12e21c68d0 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.584813] env[68279]: DEBUG oslo_vmware.api [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963763, 'name': SuspendVM_Task} progress is 62%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.714645] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963766, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.767084] env[68279]: DEBUG nova.compute.manager [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.767084] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.767084] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ce6139-8e76-48d3-beae-4756953b2567 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.774663] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.775357] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53d3c231-5cbd-4c93-a0c7-88981081e2f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.787838] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963767, 'name': PowerOffVM_Task, 'duration_secs': 0.389294} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.791064] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.791064] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1095.791064] env[68279]: value = "task-2963768" [ 1095.791064] env[68279]: _type = "Task" [ 1095.791064] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.791064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e804300-f00a-4627-aeb9-cde1ec693e5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.820310] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963768, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.821216] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8592b3-8f01-4c7a-b959-71ced6d4a905 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.849506] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Successfully updated port: 57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.030715] env[68279]: DEBUG nova.compute.utils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1096.032414] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1096.032607] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1096.078856] env[68279]: DEBUG nova.policy [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c7ffc9cd8334404b894cf87c827c92e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd260342c3d534d41b74da099229f4ecd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1096.090405] env[68279]: DEBUG oslo_vmware.api [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963763, 'name': SuspendVM_Task, 'duration_secs': 0.679057} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.090686] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Suspended the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1096.090862] env[68279]: DEBUG nova.compute.manager [None req-8deb1abe-c7aa-495e-a447-99f340ebf107 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1096.092788] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984111ef-f5ac-41dd-bf15-948fb1992538 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.109012] env[68279]: DEBUG nova.network.neutron [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updated VIF entry in instance network info cache for port 80e209dd-e4b0-4331-87a6-92e23bdfa270. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1096.109418] env[68279]: DEBUG nova.network.neutron [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.218649] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963766, 'name': CreateVM_Task, 'duration_secs': 0.541878} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.218955] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1096.219711] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.219827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.220164] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1096.220445] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62cc2346-1117-4055-bc05-6a522739c151 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.228293] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1096.228293] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526a9875-c5c0-bf57-cfd7-485ae9cc73d3" [ 1096.228293] env[68279]: _type = "Task" [ 1096.228293] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.239476] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526a9875-c5c0-bf57-cfd7-485ae9cc73d3, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.242153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.242153] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.242153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.242153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.242153] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.242418] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b2d6350-6d62-47d9-9e63-12ba006f0a3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.252057] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.252057] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.252688] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f421b5b0-a46b-4aa2-adf5-2d809e7b23ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.260303] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1096.260303] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52243aae-88c5-6657-a746-7bd081e8d84f" [ 1096.260303] env[68279]: _type = "Task" [ 1096.260303] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.271164] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52243aae-88c5-6657-a746-7bd081e8d84f, 'name': SearchDatastore_Task, 'duration_secs': 0.009804} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.275091] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47fb8e71-dea7-4d43-8c0f-30fa7dd664fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.285164] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1096.285164] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fc4ce-083f-5ccf-2d9d-a0129f879ff6" [ 1096.285164] env[68279]: _type = "Task" [ 1096.285164] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.290297] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8618e70-9cbb-469d-b38a-45cfda88f63d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.295712] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fc4ce-083f-5ccf-2d9d-a0129f879ff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.301874] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91074ee3-1fc2-4417-a1ed-4917946c049c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.308050] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963768, 'name': PowerOffVM_Task, 'duration_secs': 0.297323} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.309713] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.309937] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.310205] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e0b08b8-6137-4284-82f4-a14c5390efe6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.339401] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1096.342787] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1031d2e1-bf8d-4f22-ab1e-602a14574a9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.342787] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7b78fe-fca4-4f29-a0d5-98c230167c4a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.356207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.356380] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquired lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.356510] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.360670] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784ccf4f-e6e1-4a60-a90f-d611e9f4e296 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.368049] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 
tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1096.368049] env[68279]: value = "task-2963770" [ 1096.368049] env[68279]: _type = "Task" [ 1096.368049] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.385021] env[68279]: DEBUG nova.compute.provider_tree [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.388950] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963770, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.390408] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.390605] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.390778] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore1] 66b2ce98-9a8a-4344-bd7d-80b7fa001344 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.391039] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d9357c1-80b5-422b-baf5-aa91c27fd683 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.400163] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1096.400163] env[68279]: value = "task-2963771" [ 1096.400163] env[68279]: _type = "Task" [ 1096.400163] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.404062] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.417112] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963771, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.430115] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Successfully created port: 7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.536227] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1096.612541] env[68279]: DEBUG oslo_concurrency.lockutils [req-5eeb3477-cfc3-48c1-bc11-d4d3120ea9d4 req-80922780-99f8-4d16-a517-8dc683accdf7 service nova] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.636235] env[68279]: DEBUG nova.network.neutron [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Updating instance_info_cache with network_info: [{"id": "57af1b7b-23d4-429e-b620-f1d918c431e5", "address": "fa:16:3e:04:84:61", "network": {"id": "9b868c4e-a40c-4d58-aae3-2ae167a5d186", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-586340475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebe5dfa4da04412980adbfecc868186a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af1b7b-23", "ovs_interfaceid": "57af1b7b-23d4-429e-b620-f1d918c431e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.796903] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525fc4ce-083f-5ccf-2d9d-a0129f879ff6, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.796903] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.797307] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1096.797307] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-044dce98-4eb3-48ab-a877-ff44286422f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.804851] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1096.804851] env[68279]: value = "task-2963772" [ 1096.804851] env[68279]: _type = "Task" [ 1096.804851] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.813514] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.877533] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963770, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.884750] env[68279]: DEBUG nova.scheduler.client.report [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.899393] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.899703] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.899954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "e3763645-5a78-4929-98a3-108e72071211-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.900180] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.900421] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.903065] env[68279]: INFO nova.compute.manager [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Terminating instance [ 1096.917014] env[68279]: DEBUG oslo_vmware.api [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 
tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472359} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.917368] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.917602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.917861] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.918092] env[68279]: INFO nova.compute.manager [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1096.918445] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.918914] env[68279]: DEBUG nova.compute.manager [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.919059] env[68279]: DEBUG nova.network.neutron [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.139261] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Releasing lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.139643] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Instance network_info: |[{"id": "57af1b7b-23d4-429e-b620-f1d918c431e5", "address": "fa:16:3e:04:84:61", "network": {"id": "9b868c4e-a40c-4d58-aae3-2ae167a5d186", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-586340475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebe5dfa4da04412980adbfecc868186a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af1b7b-23", "ovs_interfaceid": "57af1b7b-23d4-429e-b620-f1d918c431e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1097.140099] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:84:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57af1b7b-23d4-429e-b620-f1d918c431e5', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1097.148523] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Creating folder: Project (ebe5dfa4da04412980adbfecc868186a). Parent ref: group-v594445. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1097.149243] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b527101-4973-495e-b4dd-3115383e94c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.163284] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Created folder: Project (ebe5dfa4da04412980adbfecc868186a) in parent group-v594445. [ 1097.163527] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Creating folder: Instances. Parent ref: group-v594728. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1097.163842] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22ded1c0-63bd-4dc9-b5f9-b8eb77cda1b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.175889] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Created folder: Instances in parent group-v594728. [ 1097.176202] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.176420] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1097.176666] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15ad3ba1-6c5c-4d1d-8257-a55ed0eff871 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.198642] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1097.198642] env[68279]: value = "task-2963775" [ 1097.198642] env[68279]: _type = "Task" [ 1097.198642] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.208241] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963775, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.320978] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963772, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.382563] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963770, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.389939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.394209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.810s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.394209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.394209] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1097.394209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.421s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.394506] env[68279]: INFO nova.compute.claims [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.397756] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f415d6de-df6e-4476-8d4d-abb9f96c94ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.412858] env[68279]: DEBUG nova.compute.manager [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.416672] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.419443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c41bdf7-098f-45e7-8b76-25c936c34cf6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.423770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3729a7b3-d016-4896-8230-4d43e1d1c1b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.446037] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4163e7-b461-4759-81da-086ac35e3b40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.449020] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.450534] env[68279]: DEBUG nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Received event network-vif-plugged-57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.450674] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Acquiring lock "296358b1-e978-409c-8113-587ae8f806c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.450884] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Lock "296358b1-e978-409c-8113-587ae8f806c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.451066] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Lock "296358b1-e978-409c-8113-587ae8f806c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.451321] env[68279]: DEBUG nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] No waiting events found dispatching network-vif-plugged-57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1097.451922] env[68279]: WARNING nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Received unexpected event network-vif-plugged-57af1b7b-23d4-429e-b620-f1d918c431e5 for instance with vm_state building and task_state spawning. [ 1097.452171] env[68279]: DEBUG nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Received event network-changed-57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.452365] env[68279]: DEBUG nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Refreshing instance network info cache due to event network-changed-57af1b7b-23d4-429e-b620-f1d918c431e5. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1097.452617] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Acquiring lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.453575] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Acquired lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.453575] env[68279]: DEBUG nova.network.neutron [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Refreshing network info cache for port 57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.455157] env[68279]: INFO nova.scheduler.client.report [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Deleted allocations for instance fe92e176-222c-4c46-a254-1c12e21c68d0 [ 1097.456328] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7e88e94-3739-4b82-9d84-5141bb76ab8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.470545] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c5121-74c5-4fcf-82c4-8a040f8927b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.476023] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1097.476023] env[68279]: value = "task-2963776" [ 1097.476023] env[68279]: _type = "Task" [ 1097.476023] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.506927] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178615MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1097.506927] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.515358] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.546623] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1097.577921] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1097.578198] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.578361] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1097.578542] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Flavor pref 0:0:0 
{{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.578687] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1097.578831] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1097.579049] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1097.579215] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1097.579385] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1097.579546] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1097.579756] env[68279]: DEBUG nova.virt.hardware [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1097.580654] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bf2fa8-88ff-4211-b45b-7c845913207e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.589228] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2f3bf3-c849-474d-8e37-5e714c6fff70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.709093] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963775, 'name': CreateVM_Task, 'duration_secs': 0.428766} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.709366] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.709980] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.710268] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.710486] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.710742] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ceca3e3-28f8-45af-b783-5fd5ad2c3295 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.718156] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1097.718156] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252ba5b-319d-d0a4-23bb-3fc882bc2ac7" [ 1097.718156] env[68279]: _type = "Task" [ 1097.718156] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.726290] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252ba5b-319d-d0a4-23bb-3fc882bc2ac7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.732858] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.733161] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.733307] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.733487] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.733674] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.735879] env[68279]: INFO nova.compute.manager [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Terminating instance [ 1097.817075] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557716} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.817405] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.817588] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.817873] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbe31b64-9978-416b-8a46-286c34dbc03f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.825201] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1097.825201] env[68279]: value = "task-2963777" [ 1097.825201] env[68279]: _type = "Task" [ 1097.825201] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.830676] env[68279]: DEBUG nova.compute.manager [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Received event network-vif-plugged-7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.830888] env[68279]: DEBUG oslo_concurrency.lockutils [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] Acquiring lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.831113] env[68279]: DEBUG oslo_concurrency.lockutils [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.831286] env[68279]: DEBUG oslo_concurrency.lockutils [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.831483] env[68279]: DEBUG nova.compute.manager [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] No waiting events found dispatching 
network-vif-plugged-7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1097.831651] env[68279]: WARNING nova.compute.manager [req-9f966ba2-b838-4db6-8214-dc57a18719c5 req-7fd1e6bb-1e11-43c2-9c69-ab76955950f8 service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Received unexpected event network-vif-plugged-7cbc8ddd-efea-4295-9a79-1bced1aa3082 for instance with vm_state building and task_state spawning. [ 1097.838267] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.852577] env[68279]: DEBUG nova.network.neutron [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.880097] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963770, 'name': CreateSnapshot_Task, 'duration_secs': 1.038525} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.880368] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1097.881131] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4a6a27-9d01-44c9-9d06-70d34cda6ec0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.967144] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a6f2851b-cd50-4e7a-bcef-1a2e219f76bd tempest-ServerRescueNegativeTestJSON-1864597666 tempest-ServerRescueNegativeTestJSON-1864597666-project-member] Lock "fe92e176-222c-4c46-a254-1c12e21c68d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.650s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.988706] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963776, 'name': PowerOffVM_Task, 'duration_secs': 0.272378} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.988973] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.989310] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.989552] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-daff7bae-ebc1-4797-aed2-daed5a5d15ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.082152] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.082152] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.082152] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleting the datastore file [datastore1] e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.082152] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01062add-ba50-470d-968f-c6daf69afb20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.088683] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1098.088683] env[68279]: value = "task-2963779" [ 1098.088683] env[68279]: _type = "Task" [ 1098.088683] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.103551] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963779, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.143872] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Successfully updated port: 7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.194240] env[68279]: DEBUG nova.network.neutron [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Updated VIF entry in instance network info cache for port 57af1b7b-23d4-429e-b620-f1d918c431e5. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.194622] env[68279]: DEBUG nova.network.neutron [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Updating instance_info_cache with network_info: [{"id": "57af1b7b-23d4-429e-b620-f1d918c431e5", "address": "fa:16:3e:04:84:61", "network": {"id": "9b868c4e-a40c-4d58-aae3-2ae167a5d186", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-586340475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebe5dfa4da04412980adbfecc868186a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af1b7b-23", "ovs_interfaceid": "57af1b7b-23d4-429e-b620-f1d918c431e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.229546] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252ba5b-319d-d0a4-23bb-3fc882bc2ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.019991} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.229895] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.230170] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.230415] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.230605] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.230737] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.230996] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32961916-fcf7-4d9b-bfb0-00b9de5b164e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.240428] env[68279]: DEBUG nova.compute.manager [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1098.240699] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1098.241278] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.241515] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.242708] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8684f6f6-982b-4e11-b4ed-391aae06cd9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.245745] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df48149b-111b-46b3-97f2-2ea85cea480d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.251461] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1098.251461] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522aa1a1-3fde-6606-612d-bcf8f6b9fa3a" [ 1098.251461] env[68279]: _type = "Task" [ 1098.251461] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.253737] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.256939] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a56fbace-cf52-4f71-a14c-78289475c593 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.264062] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]522aa1a1-3fde-6606-612d-bcf8f6b9fa3a, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.264803] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de16d1f4-8220-4ecb-87b8-62a05ce24311 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.270544] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1098.270544] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52455d38-8acc-013b-df2c-9a1dba60871d" [ 1098.270544] env[68279]: _type = "Task" [ 1098.270544] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.279078] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52455d38-8acc-013b-df2c-9a1dba60871d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.314760] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.314992] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.315224] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore1] 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.315591] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcb23e48-cde1-4500-ad62-b90fd95d2001 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.322236] env[68279]: DEBUG oslo_vmware.api [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1098.322236] env[68279]: value = "task-2963781" [ 1098.322236] env[68279]: _type = "Task" [ 1098.322236] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.332100] env[68279]: DEBUG oslo_vmware.api [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963781, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.336200] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070438} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.336440] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1098.337175] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e324b0-6dc0-46b2-bd64-1e447451b8de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.360441] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.361209] env[68279]: INFO nova.compute.manager [-] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Took 1.44 seconds to deallocate network for instance. [ 1098.361478] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8996c38-64ad-41d1-85da-e027ac3082c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.383226] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1098.383226] env[68279]: value = "task-2963782" [ 1098.383226] env[68279]: _type = "Task" [ 1098.383226] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.391542] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963782, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.399186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1098.399488] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-eb9f7901-5c38-408d-b810-7e37f8fb8003 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.409960] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1098.409960] env[68279]: value = "task-2963783" [ 1098.409960] env[68279]: _type = "Task" [ 1098.409960] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.421451] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963783, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.601148] env[68279]: DEBUG oslo_vmware.api [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155767} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.605444] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.605444] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.605705] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.605941] env[68279]: INFO nova.compute.manager [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: e3763645-5a78-4929-98a3-108e72071211] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1098.606345] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.607033] env[68279]: DEBUG nova.compute.manager [-] [instance: e3763645-5a78-4929-98a3-108e72071211] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.607187] env[68279]: DEBUG nova.network.neutron [-] [instance: e3763645-5a78-4929-98a3-108e72071211] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.634640] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f283b8b3-d083-4b4c-83c5-0f6179fc9ba7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.644390] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54518639-5942-433a-bc86-a13fa7e155e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.649387] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.649480] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquired lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.650070] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.688024] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f68ca6-523f-4dcd-90c6-dccfddb48beb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.695013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edb40c6-9fdd-4037-b78c-763488f882be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.701900] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] Releasing lock "refresh_cache-296358b1-e978-409c-8113-587ae8f806c7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.701900] env[68279]: DEBUG 
nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Received event network-vif-deleted-87277960-0dc5-4e95-a4ac-6542504f83ee {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.701900] env[68279]: INFO nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Neutron deleted interface 87277960-0dc5-4e95-a4ac-6542504f83ee; detaching it from the instance and deleting it from the info cache [ 1098.701900] env[68279]: DEBUG nova.network.neutron [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.714829] env[68279]: DEBUG nova.compute.provider_tree [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.781771] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52455d38-8acc-013b-df2c-9a1dba60871d, 'name': SearchDatastore_Task, 'duration_secs': 0.009861} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.782051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.782359] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 296358b1-e978-409c-8113-587ae8f806c7/296358b1-e978-409c-8113-587ae8f806c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.782598] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-572dce1e-24ae-4c54-a3ee-1483b8ba7ddc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.791942] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1098.791942] env[68279]: value = "task-2963784" [ 1098.791942] env[68279]: _type = "Task" [ 1098.791942] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.800769] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.834466] env[68279]: DEBUG oslo_vmware.api [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199082} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.834466] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.834466] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.834466] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.834466] env[68279]: INFO nova.compute.manager [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1098.834466] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.834466] env[68279]: DEBUG nova.compute.manager [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.834466] env[68279]: DEBUG nova.network.neutron [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.881592] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.896600] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963782, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.921121] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963783, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.202997] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63c21b5f-2dbc-4f7d-820e-20b1cd34f2df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.218947] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ca78c9-7d32-46b2-afab-b9261993ce7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.229059] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.232267] env[68279]: DEBUG nova.scheduler.client.report [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.259910] env[68279]: DEBUG nova.compute.manager [req-cf8cb23e-0225-4b4a-bd62-896f74a77330 req-a93554c5-09ea-4d89-b481-d2fee2dd2938 service nova] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Detach interface failed, port_id=87277960-0dc5-4e95-a4ac-6542504f83ee, reason: Instance 66b2ce98-9a8a-4344-bd7d-80b7fa001344 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1099.302898] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963784, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.395318] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963782, 'name': ReconfigVM_Task, 'duration_secs': 0.740298} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.395621] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.396322] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3daf5f5e-5d06-4f6c-929f-82485e91dfe6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.405313] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1099.405313] env[68279]: value = "task-2963785" [ 1099.405313] env[68279]: _type = "Task" [ 1099.405313] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.415088] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963785, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.426338] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963783, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.476757] env[68279]: DEBUG nova.compute.manager [req-d1a90158-153f-42eb-a4c2-41f737622014 req-50a94dae-4d31-4e4d-8e6d-678ccdd5a8a2 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Received event network-vif-deleted-f1c4e041-ced5-433b-a721-e9fa16d159ce {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1099.476992] env[68279]: INFO nova.compute.manager [req-d1a90158-153f-42eb-a4c2-41f737622014 req-50a94dae-4d31-4e4d-8e6d-678ccdd5a8a2 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Neutron deleted interface f1c4e041-ced5-433b-a721-e9fa16d159ce; detaching it from the instance and deleting it from the info cache [ 1099.477491] env[68279]: DEBUG nova.network.neutron [req-d1a90158-153f-42eb-a4c2-41f737622014 req-50a94dae-4d31-4e4d-8e6d-678ccdd5a8a2 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.512701] env[68279]: DEBUG nova.network.neutron [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Updating instance_info_cache with network_info: [{"id": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "address": "fa:16:3e:2a:6c:ae", "network": {"id": "a5a46993-8a01-477f-9a1d-088c0d9ab8bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1387147092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d260342c3d534d41b74da099229f4ecd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbc8ddd-ef", "ovs_interfaceid": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.683043] env[68279]: DEBUG nova.network.neutron [-] [instance: 
e3763645-5a78-4929-98a3-108e72071211] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.738905] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.738905] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.743470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.444s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.743470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.744946] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.239s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.776651] env[68279]: INFO nova.scheduler.client.report [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocations for instance a96ea5b4-39c5-4a24-873f-54480f876fbf [ 1099.810022] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689942} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.810022] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 296358b1-e978-409c-8113-587ae8f806c7/296358b1-e978-409c-8113-587ae8f806c7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.810022] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.810022] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed116204-d246-4969-a34d-acd62c507a52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.816724] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1099.816724] env[68279]: value = "task-2963786" [ 1099.816724] env[68279]: _type = "Task" [ 1099.816724] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.823726] env[68279]: DEBUG nova.network.neutron [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.829797] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963786, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.883126] env[68279]: DEBUG nova.compute.manager [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Received event network-changed-7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1099.883126] env[68279]: DEBUG nova.compute.manager [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Refreshing instance network info cache due to event network-changed-7cbc8ddd-efea-4295-9a79-1bced1aa3082. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1099.883126] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] Acquiring lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.921042] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963785, 'name': Rename_Task, 'duration_secs': 0.18305} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.923661] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1099.924069] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b049c2-1d01-4234-a611-82d7b5c6893e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.933160] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1099.933160] env[68279]: value = "task-2963787" [ 1099.933160] env[68279]: _type = "Task" [ 1099.933160] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.936333] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963783, 'name': CloneVM_Task, 'duration_secs': 1.465689} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.939769] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Created linked-clone VM from snapshot [ 1099.940578] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742879a2-932b-4fe5-ad7d-19dfc3ac1139 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.949884] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963787, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.953771] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Uploading image 4f678577-45a3-48b3-b6bb-321a68ff5e6e {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1099.980998] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb12dcce-f853-42cd-b41b-738c236334e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.986486] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1099.986486] env[68279]: value = "vm-594732" [ 1099.986486] env[68279]: _type = "VirtualMachine" [ 1099.986486] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1099.986797] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ac851b6-0ff3-471f-96a8-86fe52046b24 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.993696] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f466b4-1264-426a-9ebe-b3f50bde885e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.006622] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease: (returnval){ [ 1100.006622] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520bcab8-3473-fce0-2b22-f8d43e0b1048" [ 1100.006622] env[68279]: _type = "HttpNfcLease" [ 1100.006622] env[68279]: } obtained for exporting VM: (result){ [ 1100.006622] env[68279]: value = "vm-594732" [ 1100.006622] env[68279]: _type = "VirtualMachine" [ 1100.006622] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1100.007038] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the lease: (returnval){ [ 1100.007038] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520bcab8-3473-fce0-2b22-f8d43e0b1048" [ 1100.007038] env[68279]: _type = "HttpNfcLease" [ 1100.007038] env[68279]: } to be ready. 
{{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1100.014596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Releasing lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.014924] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Instance network_info: |[{"id": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "address": "fa:16:3e:2a:6c:ae", "network": {"id": "a5a46993-8a01-477f-9a1d-088c0d9ab8bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1387147092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d260342c3d534d41b74da099229f4ecd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbc8ddd-ef", "ovs_interfaceid": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1100.015188] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1100.015188] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520bcab8-3473-fce0-2b22-f8d43e0b1048" [ 1100.015188] env[68279]: _type = "HttpNfcLease" [ 1100.015188] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1100.015424] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] Acquired lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.015605] env[68279]: DEBUG nova.network.neutron [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Refreshing network info cache for port 7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.016867] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:6c:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cbc8ddd-efea-4295-9a79-1bced1aa3082', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.024239] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Creating folder: Project (d260342c3d534d41b74da099229f4ecd). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1100.034684] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab65ed8e-7694-4a2b-b8ec-b633f5be3f7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.036535] env[68279]: DEBUG nova.compute.manager [req-d1a90158-153f-42eb-a4c2-41f737622014 req-50a94dae-4d31-4e4d-8e6d-678ccdd5a8a2 service nova] [instance: e3763645-5a78-4929-98a3-108e72071211] Detach interface failed, port_id=f1c4e041-ced5-433b-a721-e9fa16d159ce, reason: Instance e3763645-5a78-4929-98a3-108e72071211 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1100.047152] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Created folder: Project (d260342c3d534d41b74da099229f4ecd) in parent group-v594445. [ 1100.047335] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Creating folder: Instances. Parent ref: group-v594733. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1100.047566] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9baa8a0-933e-47ab-9d18-418b13111521 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.056852] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Created folder: Instances in parent group-v594733. [ 1100.058028] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.058028] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.058028] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc10a148-6c81-4b80-a653-3b2d5d33b141 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.077832] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.077832] env[68279]: value = "task-2963791" [ 1100.077832] env[68279]: _type = "Task" [ 1100.077832] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.085561] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963791, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.189395] env[68279]: INFO nova.compute.manager [-] [instance: e3763645-5a78-4929-98a3-108e72071211] Took 1.58 seconds to deallocate network for instance. [ 1100.243710] env[68279]: DEBUG nova.compute.utils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1100.245129] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1100.245306] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1100.286871] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e047b4c2-3d7d-4f4d-ba9d-f32fd54995d3 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "a96ea5b4-39c5-4a24-873f-54480f876fbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.939s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.316922] env[68279]: DEBUG nova.policy [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '655eae57bb1349c0a229c3b57f4d3446', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f947b60992d543c4b0bfee2553bfe357', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1100.326136] env[68279]: INFO nova.compute.manager [-] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Took 1.49 seconds to deallocate network for instance. [ 1100.331534] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963786, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068913} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.334392] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.335225] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecbcf07-c1b7-4386-9b6c-a98dc37bbd1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.365838] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 296358b1-e978-409c-8113-587ae8f806c7/296358b1-e978-409c-8113-587ae8f806c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.366202] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bb1a888-1574-48b9-94d6-692f2ff4718d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.392564] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1100.392564] env[68279]: value = "task-2963792" [ 1100.392564] env[68279]: _type = "Task" [ 1100.392564] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.406113] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963792, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.449137] env[68279]: DEBUG oslo_vmware.api [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963787, 'name': PowerOnVM_Task, 'duration_secs': 0.493647} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.449137] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.449137] env[68279]: INFO nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Took 7.64 seconds to spawn the instance on the hypervisor. 
[ 1100.449137] env[68279]: DEBUG nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.449137] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7198bb50-4600-4968-8091-f56818c376ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.516266] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1100.516266] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520bcab8-3473-fce0-2b22-f8d43e0b1048" [ 1100.516266] env[68279]: _type = "HttpNfcLease" [ 1100.516266] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1100.516463] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1100.516463] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520bcab8-3473-fce0-2b22-f8d43e0b1048" [ 1100.516463] env[68279]: _type = "HttpNfcLease" [ 1100.516463] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1100.517172] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b41450b-c982-464d-b9f8-426e4dcd33e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.527760] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1100.527968] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1100.607038] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963791, 'name': CreateVM_Task, 'duration_secs': 0.385457} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.607230] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.607951] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.608160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.608489] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1100.608830] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-367731d7-f3f9-41a7-ad67-3fad7f9d2d2c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.614146] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1100.614146] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a22e0d-9dbc-5780-dfbc-9eafed3739b6" [ 1100.614146] env[68279]: _type = "Task" [ 1100.614146] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.621927] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a22e0d-9dbc-5780-dfbc-9eafed3739b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.695345] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.698367] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Successfully created port: 99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.721200] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e688618b-0ec3-4aab-8f8a-259e825e8e87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.748164] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.786408] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.786559] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e3763645-5a78-4929-98a3-108e72071211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.786700] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7e34039c-c51a-4f9c-961c-144f6d8a5130 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.786790] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 50f390b2-99b7-49f3-997f-7d7b50cff9f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.786907] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787041] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance f6a65d1b-ba9c-44b7-b9aa-815cabd45176 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787169] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 525e4894-a8b1-45ae-a846-84ded8d97584 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787304] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e3da334a-1dfc-41d8-8ba8-aabe53924bdc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787769] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787769] env[68279]: WARNING nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 66b2ce98-9a8a-4344-bd7d-80b7fa001344 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1100.787769] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787769] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 296358b1-e978-409c-8113-587ae8f806c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787990] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance fddf4cb7-cffb-41bb-9806-b8f69579cfef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.787990] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 778efb81-2562-4d55-ace0-09722d92fa5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.788224] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1100.788616] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1100.842089] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.897112] env[68279]: DEBUG nova.network.neutron [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Updated VIF entry in instance network info cache for port 7cbc8ddd-efea-4295-9a79-1bced1aa3082. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.897583] env[68279]: DEBUG nova.network.neutron [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Updating instance_info_cache with network_info: [{"id": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "address": "fa:16:3e:2a:6c:ae", "network": {"id": "a5a46993-8a01-477f-9a1d-088c0d9ab8bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1387147092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d260342c3d534d41b74da099229f4ecd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbc8ddd-ef", "ovs_interfaceid": "7cbc8ddd-efea-4295-9a79-1bced1aa3082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.907084] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963792, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.967970] env[68279]: INFO nova.compute.manager [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Took 22.55 seconds to build instance. 
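Note on the resource tracker summary above: the "Final resource view" follows arithmetically from the thirteen actively managed instances it lists, each carrying the m1.nano footprint (1 VCPU, 192 MB RAM, 1 GB disk), plus the 512 MB of reserved host memory visible in the placement inventory reported a little further down. A minimal sketch of that arithmetic, using only figures taken from this log:

    # Reproduce the tracker's totals: 13 actively managed instances, each with
    # the m1.nano footprint, plus the host's reserved memory (512 MB).
    instances = 13
    vcpus_per_instance = 1
    ram_mb_per_instance = 192
    disk_gb_per_instance = 1
    reserved_host_memory_mb = 512

    used_vcpus = instances * vcpus_per_instance                              # 13
    used_ram_mb = reserved_host_memory_mb + instances * ram_mb_per_instance  # 3008
    used_disk_gb = instances * disk_gb_per_instance                          # 13

    # Matches the final view above: used_vcpus=13, used_ram=3008MB, used_disk=13GB.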
[ 1101.045934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe31ed6-9e29-490d-9067-3dbc14b1b400 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.055469] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8355800b-8dcf-459d-959c-31f9b3fc1206 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.089168] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eef91d1-b053-43e4-8771-49433bae916c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.098532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93afc60-4bd6-43c8-b5cd-fc5c155d6244 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.111175] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.123645] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a22e0d-9dbc-5780-dfbc-9eafed3739b6, 'name': SearchDatastore_Task, 'duration_secs': 0.020768} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.124160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.124406] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.124700] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.124901] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.125153] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.125466] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23df2b5c-5883-44f9-9c0d-01768941574f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.133851] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.134069] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.134854] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-980b1f7f-fa34-418c-8ae9-f6b53c30be4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.140494] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1101.140494] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52019e7b-0096-d8f7-c8c5-a114d70016c0" [ 1101.140494] env[68279]: _type = "Task" [ 1101.140494] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.150599] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52019e7b-0096-d8f7-c8c5-a114d70016c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.404249] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] Releasing lock "refresh_cache-fddf4cb7-cffb-41bb-9806-b8f69579cfef" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.404594] env[68279]: DEBUG nova.compute.manager [req-bdbd647f-e690-420f-9a7d-aced6b7ac9bc req-6681bb05-4dcd-4e5c-b69b-3abafe2343de service nova] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Received event network-vif-deleted-45600165-3cae-4f5f-a5ab-9c71c5d7035d {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1101.404904] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963792, 'name': ReconfigVM_Task, 'duration_secs': 0.706065} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.405162] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 296358b1-e978-409c-8113-587ae8f806c7/296358b1-e978-409c-8113-587ae8f806c7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.405854] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-faaee6ba-1721-475e-8cf8-bde5216a2486 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.411972] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1101.411972] env[68279]: value = "task-2963793" [ 1101.411972] env[68279]: _type = "Task" [ 1101.411972] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.421054] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963793, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.474782] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78f8f300-ad62-414b-8f10-2f594f702cb4 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.074s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.617376] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.653844] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52019e7b-0096-d8f7-c8c5-a114d70016c0, 'name': SearchDatastore_Task, 'duration_secs': 0.016715} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.654735] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9331a26-8d6c-49aa-92c3-d6f204f56fba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.659970] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1101.659970] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d7515b-ac3e-9720-1ff1-2d5a19447a2e" [ 1101.659970] env[68279]: _type = "Task" [ 1101.659970] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.668815] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d7515b-ac3e-9720-1ff1-2d5a19447a2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.765025] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.792648] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.792648] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.793160] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.793471] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.793715] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.795023] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.795023] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.795023] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.795023] env[68279]: DEBUG 
nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.795023] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.795023] env[68279]: DEBUG nova.virt.hardware [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.796115] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4728bc4-b5b3-48f4-9a51-0c27e509fb00 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.806089] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0ab47-feef-4913-8a88-bc635cc9e056 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.922341] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963793, 'name': Rename_Task, 'duration_secs': 0.215173} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.922879] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1101.923250] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c100379-b560-4258-86ed-462e7e8ff617 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.930417] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1101.930417] env[68279]: value = "task-2963794" [ 1101.930417] env[68279]: _type = "Task" [ 1101.930417] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.941717] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963794, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.999872] env[68279]: DEBUG nova.compute.manager [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Received event network-changed-80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1101.999872] env[68279]: DEBUG nova.compute.manager [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Refreshing instance network info cache due to event network-changed-80e209dd-e4b0-4331-87a6-92e23bdfa270. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1102.001481] env[68279]: DEBUG oslo_concurrency.lockutils [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.001667] env[68279]: DEBUG oslo_concurrency.lockutils [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.001845] env[68279]: DEBUG nova.network.neutron [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Refreshing network info cache for port 80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.122121] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1102.122121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.376s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.122121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.240s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.122121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.124309] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.429s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.125333] env[68279]: DEBUG nova.objects.instance [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'resources' on Instance uuid e3763645-5a78-4929-98a3-108e72071211 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.149719] env[68279]: INFO nova.scheduler.client.report [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance 66b2ce98-9a8a-4344-bd7d-80b7fa001344 [ 1102.172622] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d7515b-ac3e-9720-1ff1-2d5a19447a2e, 'name': SearchDatastore_Task, 'duration_secs': 0.014737} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.172898] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.173323] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] fddf4cb7-cffb-41bb-9806-b8f69579cfef/fddf4cb7-cffb-41bb-9806-b8f69579cfef.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.173698] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-739b3e1c-1486-48b9-8a6c-59cc31058ee4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.180938] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1102.180938] env[68279]: value = "task-2963795" [ 1102.180938] env[68279]: _type = "Task" [ 1102.180938] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.189748] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963795, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.232963] env[68279]: DEBUG nova.compute.manager [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Received event network-vif-plugged-99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.233309] env[68279]: DEBUG oslo_concurrency.lockutils [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] Acquiring lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.233589] env[68279]: DEBUG oslo_concurrency.lockutils [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] Lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.233878] env[68279]: DEBUG oslo_concurrency.lockutils [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] Lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.234093] env[68279]: DEBUG nova.compute.manager [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] No waiting events found dispatching network-vif-plugged-99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1102.234342] env[68279]: WARNING nova.compute.manager [req-d820928a-39d9-4390-accf-3a3f664cb71c req-6104ff55-d594-43a1-9e3c-dfeb778dd121 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Received unexpected event network-vif-plugged-99b8877f-8923-4dc7-8f41-91034ef1aa8f for instance with vm_state building and task_state spawning. [ 1102.336310] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Successfully updated port: 99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1102.441316] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963794, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.657204] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4843a779-2aa6-48fd-81bf-e8ff284e7c00 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "66b2ce98-9a8a-4344-bd7d-80b7fa001344" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.400s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.700956] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963795, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.766412] env[68279]: DEBUG nova.network.neutron [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updated VIF entry in instance network info cache for port 80e209dd-e4b0-4331-87a6-92e23bdfa270. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.766821] env[68279]: DEBUG nova.network.neutron [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.838891] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc5a92b-3ee4-4d5c-86e7-1ca906bb3ac1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.843886] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.844159] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.844414] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.851918] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0830523d-f3d0-44aa-b7eb-fd804e1c78a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.895256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3cac1a-8682-442a-829f-94fb94a4c7af {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.909478] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0115af9f-6ff6-46b1-bce5-7dc966714ef7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.927494] env[68279]: DEBUG nova.compute.provider_tree [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.943626] env[68279]: DEBUG oslo_vmware.api [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963794, 'name': PowerOnVM_Task, 'duration_secs': 0.942324} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.944274] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1102.944627] env[68279]: INFO nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Took 7.75 seconds to spawn the instance on the hypervisor. 
[ 1102.944870] env[68279]: DEBUG nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1102.945964] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03451334-d4c0-4780-89df-b953a28a79e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.193090] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606022} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.193403] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] fddf4cb7-cffb-41bb-9806-b8f69579cfef/fddf4cb7-cffb-41bb-9806-b8f69579cfef.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.193642] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.193917] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcba87de-ddab-4ab5-ad9b-18efce59d694 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.200905] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1103.200905] env[68279]: value = "task-2963796" [ 1103.200905] env[68279]: _type = "Task" [ 1103.200905] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.209184] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.244038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "525e4894-a8b1-45ae-a846-84ded8d97584" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.244380] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.244601] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.244811] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.245015] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.247285] env[68279]: INFO nova.compute.manager [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Terminating instance [ 1103.269926] env[68279]: DEBUG oslo_concurrency.lockutils [req-2a7f9a01-e7d5-4e33-b6bf-7d26f2d7159e req-c3f12280-5d8e-451e-93ea-b3d867fb673a service nova] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.378079] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1103.430676] env[68279]: DEBUG nova.scheduler.client.report [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.465718] env[68279]: INFO nova.compute.manager [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Took 24.63 seconds to build instance. [ 1103.544600] env[68279]: DEBUG nova.network.neutron [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Updating instance_info_cache with network_info: [{"id": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "address": "fa:16:3e:11:be:4c", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b8877f-89", "ovs_interfaceid": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.711165] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079357} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.711456] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.712256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86017c8c-100d-4bf9-aacd-ed128b9b721c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.735865] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] fddf4cb7-cffb-41bb-9806-b8f69579cfef/fddf4cb7-cffb-41bb-9806-b8f69579cfef.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.736054] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44787f5a-1099-4630-98d2-85d1ce830710 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.752030] env[68279]: DEBUG nova.compute.manager [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1103.752261] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1103.753243] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a933bc-c262-43e7-8410-4b913386e3b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.761653] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.762920] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d747d741-ccd8-4607-8523-e1a711556823 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.764559] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1103.764559] env[68279]: value = "task-2963797" [ 1103.764559] env[68279]: _type = "Task" [ 1103.764559] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.768916] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1103.768916] env[68279]: value = "task-2963798" [ 1103.768916] env[68279]: _type = "Task" [ 1103.768916] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.775641] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963797, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.782219] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963798, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.935589] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.938193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.096s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.938467] env[68279]: DEBUG nova.objects.instance [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'resources' on Instance uuid 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.965130] env[68279]: INFO nova.scheduler.client.report [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleted allocations for instance e3763645-5a78-4929-98a3-108e72071211 [ 1103.969555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-351cb0fc-49f8-4fe6-8dbf-2efd2c1581d6 tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.145s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.051533] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.051533] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Instance network_info: |[{"id": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "address": "fa:16:3e:11:be:4c", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b8877f-89", "ovs_interfaceid": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1104.051749] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:be:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99b8877f-8923-4dc7-8f41-91034ef1aa8f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1104.061684] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1104.061908] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1104.062720] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8912d7c7-ed99-4405-b176-ff8e88422e7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.087130] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1104.087130] env[68279]: value = "task-2963799" [ 1104.087130] env[68279]: _type = "Task" [ 1104.087130] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.095502] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963799, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.276709] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963797, 'name': ReconfigVM_Task, 'duration_secs': 0.35133} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.277457] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Reconfigured VM instance instance-0000006a to attach disk [datastore2] fddf4cb7-cffb-41bb-9806-b8f69579cfef/fddf4cb7-cffb-41bb-9806-b8f69579cfef.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.278180] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79052fa7-9ba1-482a-8483-b802a8a07b19 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.283483] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963798, 'name': PowerOffVM_Task, 'duration_secs': 0.210304} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.284172] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.284398] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.284678] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f447039-2a7a-4b44-8396-c81469b513d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.287933] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1104.287933] env[68279]: value = "task-2963800" [ 1104.287933] env[68279]: _type = "Task" [ 1104.287933] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.296011] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963800, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.301109] env[68279]: DEBUG nova.compute.manager [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Received event network-changed-99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.301153] env[68279]: DEBUG nova.compute.manager [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Refreshing instance network info cache due to event network-changed-99b8877f-8923-4dc7-8f41-91034ef1aa8f. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1104.301369] env[68279]: DEBUG oslo_concurrency.lockutils [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] Acquiring lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.301547] env[68279]: DEBUG oslo_concurrency.lockutils [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] Acquired lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.301725] env[68279]: DEBUG nova.network.neutron [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Refreshing network info cache for port 99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.450840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "296358b1-e978-409c-8113-587ae8f806c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.451107] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.451475] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "296358b1-e978-409c-8113-587ae8f806c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.451532] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.451728] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.453892] env[68279]: INFO nova.compute.manager [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Terminating instance [ 1104.474496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-62c0691e-dc42-4b8a-a8a9-f3dc77f4630d tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "e3763645-5a78-4929-98a3-108e72071211" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.575s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.579790] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.579968] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.580174] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore2] 525e4894-a8b1-45ae-a846-84ded8d97584 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.582859] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8855489e-cd19-4ab9-a435-b51f02f032e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.589226] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1104.589226] env[68279]: value = "task-2963802" [ 1104.589226] env[68279]: _type = "Task" [ 1104.589226] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.602490] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.605440] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963799, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.639710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.640053] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.640442] env[68279]: DEBUG nova.objects.instance [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid f6a65d1b-ba9c-44b7-b9aa-815cabd45176 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.707231] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12d8715-7476-442f-81e4-a9d4f6854d45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.715276] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eec1a98-70f9-4e00-9ff4-98f24c2c513b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.745949] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f8e0b7-a995-49a4-bc41-d315fff8bf69 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.754158] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5100d3c-06bb-41e6-8f25-792adaa57bbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.768116] env[68279]: DEBUG nova.compute.provider_tree [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.799334] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 
tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963800, 'name': Rename_Task, 'duration_secs': 0.224789} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.799615] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.799886] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cf8b3a2-a901-47cb-9fc0-fb00f8a5330c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.807255] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1104.807255] env[68279]: value = "task-2963803" [ 1104.807255] env[68279]: _type = "Task" [ 1104.807255] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.816063] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.958103] env[68279]: DEBUG nova.compute.manager [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.958352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.959304] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b723e7a8-ea9f-40e1-895a-f48fd840ff5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.967222] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.967490] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-457d59cc-6cb4-431c-823e-90b3137d216c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.973487] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1104.973487] env[68279]: value = "task-2963804" [ 1104.973487] env[68279]: _type = "Task" [ 1104.973487] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.981553] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963804, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.033097] env[68279]: DEBUG nova.network.neutron [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Updated VIF entry in instance network info cache for port 99b8877f-8923-4dc7-8f41-91034ef1aa8f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.033561] env[68279]: DEBUG nova.network.neutron [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Updating instance_info_cache with network_info: [{"id": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "address": "fa:16:3e:11:be:4c", "network": {"id": "87844180-511f-40ff-8519-b151708941ce", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1744137091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f947b60992d543c4b0bfee2553bfe357", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b8877f-89", "ovs_interfaceid": "99b8877f-8923-4dc7-8f41-91034ef1aa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.108845] env[68279]: DEBUG oslo_vmware.api [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256313} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.112107] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.112309] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.112724] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.112926] env[68279]: INFO nova.compute.manager [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Took 1.36 seconds to destroy the instance on the hypervisor. 
[ 1105.113219] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.113726] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963799, 'name': CreateVM_Task, 'duration_secs': 0.764604} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.113924] env[68279]: DEBUG nova.compute.manager [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.114030] env[68279]: DEBUG nova.network.neutron [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.115653] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.116380] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.116541] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.116858] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.117415] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c0754ab-55be-442f-9492-fa869a14c570 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.122958] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1105.122958] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b3d1c-8fb8-7c7d-382d-feb4ede8cfb6" [ 1105.122958] env[68279]: _type = "Task" [ 1105.122958] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.132095] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b3d1c-8fb8-7c7d-382d-feb4ede8cfb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.272119] env[68279]: DEBUG nova.scheduler.client.report [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.313576] env[68279]: DEBUG nova.objects.instance [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid f6a65d1b-ba9c-44b7-b9aa-815cabd45176 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.320632] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.483690] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963804, 'name': PowerOffVM_Task, 'duration_secs': 0.278229} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.484010] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.484144] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.484395] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e3b862f-d0b2-4ebc-9cdb-22b66018c30b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.536357] env[68279]: DEBUG oslo_concurrency.lockutils [req-54a5759e-807f-43d9-81d2-818d3c3cd2ae req-8a56c441-cedb-4f54-952b-5c8129baf758 service nova] Releasing lock "refresh_cache-778efb81-2562-4d55-ace0-09722d92fa5b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.557064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.557308] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.557439] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Deleting the datastore file [datastore2] 296358b1-e978-409c-8113-587ae8f806c7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.557659] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fca3e732-d0da-4c6c-a259-45b7c4419580 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.564720] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for the task: (returnval){ [ 1105.564720] env[68279]: value = "task-2963806" [ 1105.564720] env[68279]: _type = "Task" [ 1105.564720] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.572944] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963806, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.633930] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b3d1c-8fb8-7c7d-382d-feb4ede8cfb6, 'name': SearchDatastore_Task, 'duration_secs': 0.022686} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.634311] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.634605] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1105.634772] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.634922] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.635119] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1105.635388] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a7b29d0-c740-45bf-8a7c-b2db723114f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.645229] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1105.645417] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1105.646676] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c04dc02-8fae-4cc0-adf6-e1c27ace324a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.652694] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1105.652694] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52edc558-b470-6ffd-9316-09c7cfba9cf6" [ 1105.652694] env[68279]: _type = "Task" [ 1105.652694] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.660528] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52edc558-b470-6ffd-9316-09c7cfba9cf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.780362] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.804918] env[68279]: INFO nova.scheduler.client.report [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5 [ 1105.818112] env[68279]: DEBUG nova.objects.base [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1105.818333] env[68279]: DEBUG nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1105.820317] env[68279]: DEBUG oslo_vmware.api [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963803, 'name': PowerOnVM_Task, 'duration_secs': 0.979193} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.820443] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.820598] env[68279]: INFO nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1105.820780] env[68279]: DEBUG nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.822029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74ac7ae-709b-4bfb-8f31-4178f33c4bc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.861350] env[68279]: DEBUG nova.policy [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1105.912307] env[68279]: DEBUG nova.network.neutron [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.076185] env[68279]: DEBUG oslo_vmware.api [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Task: {'id': task-2963806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375979} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.076588] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.076910] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.077225] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.077515] env[68279]: INFO nova.compute.manager [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1106.077868] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.078185] env[68279]: DEBUG nova.compute.manager [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.078407] env[68279]: DEBUG nova.network.neutron [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.166024] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52edc558-b470-6ffd-9316-09c7cfba9cf6, 'name': SearchDatastore_Task, 'duration_secs': 0.019274} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.166024] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8ecbc9e-c0a9-4f95-9162-11b1b3d14981 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.170952] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1106.170952] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52837cdb-7515-e2c3-d52a-f7111b466b8b" [ 1106.170952] env[68279]: _type = "Task" [ 1106.170952] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.181567] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52837cdb-7515-e2c3-d52a-f7111b466b8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.239067] env[68279]: DEBUG nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Successfully created port: b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1106.314093] env[68279]: DEBUG oslo_concurrency.lockutils [None req-47d4a758-15b7-4e4e-9a2f-884c4caaed4b tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.581s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.343897] env[68279]: DEBUG nova.compute.manager [req-e2762bfc-b1cd-45b7-a7ff-64e5dfaab084 req-a7d03727-5bd0-4c5f-8b96-34bba866ef27 service nova] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Received event network-vif-deleted-0ccbc684-0a9c-420e-8e3b-877ae7a284e2 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.344652] env[68279]: INFO nova.compute.manager [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Took 25.45 seconds to build instance. [ 1106.417178] env[68279]: INFO nova.compute.manager [-] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Took 1.30 seconds to deallocate network for instance. 
[ 1106.615764] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.616071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.682897] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52837cdb-7515-e2c3-d52a-f7111b466b8b, 'name': SearchDatastore_Task, 'duration_secs': 0.022941} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.683189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.683475] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 778efb81-2562-4d55-ace0-09722d92fa5b/778efb81-2562-4d55-ace0-09722d92fa5b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1106.683699] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccc6bea5-013c-4848-8194-ef5439be5084 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.690708] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1106.690708] env[68279]: value = "task-2963807" [ 1106.690708] env[68279]: _type = "Task" [ 1106.690708] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.700212] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963807, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.846361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fbd77d9f-8f02-407d-8812-e19b3071cd25 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.957s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.927638] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.927638] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.927638] env[68279]: DEBUG nova.objects.instance [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid 525e4894-a8b1-45ae-a846-84ded8d97584 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.982632] env[68279]: DEBUG nova.network.neutron [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.031349] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.031712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.032024] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.032190] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 
tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.032568] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.034804] env[68279]: INFO nova.compute.manager [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Terminating instance [ 1107.120293] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1107.202574] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963807, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.489038] env[68279]: INFO nova.compute.manager [-] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Took 1.41 seconds to deallocate network for instance. [ 1107.539280] env[68279]: DEBUG nova.compute.manager [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1107.539528] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1107.540626] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fe0b82-3621-4368-8803-5edb5fdb9063 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.550789] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1107.551052] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74d88ba7-0069-4170-b1b3-37a29a04d85f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.557548] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1107.557548] env[68279]: value = "task-2963808" [ 1107.557548] env[68279]: _type = "Task" [ 1107.557548] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.565670] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963808, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.592557] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.592759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.608270] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e986962b-29dd-4837-b280-5501894a8646 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.617803] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beda6f13-a0c7-4dc1-a524-bfe35af71cb2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.656818] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09339679-e420-4583-8b64-a360f5728aeb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.665915] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a189ab-7c7f-427a-8f0a-2426d2889b6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.671243] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.682339] env[68279]: DEBUG nova.compute.provider_tree [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.701664] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768322} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.701959] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 778efb81-2562-4d55-ace0-09722d92fa5b/778efb81-2562-4d55-ace0-09722d92fa5b.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1107.702229] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1107.702569] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-180f444d-433f-47c7-b2b7-4564a6ee2224 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.710244] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1107.710244] env[68279]: value = "task-2963809" [ 1107.710244] env[68279]: _type = "Task" [ 1107.710244] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.718756] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963809, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.813732] env[68279]: DEBUG nova.compute.manager [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-vif-plugged-b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.813852] env[68279]: DEBUG oslo_concurrency.lockutils [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.814081] env[68279]: DEBUG oslo_concurrency.lockutils [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.814208] env[68279]: DEBUG oslo_concurrency.lockutils [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.814379] env[68279]: DEBUG nova.compute.manager [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] No waiting events found dispatching network-vif-plugged-b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1107.814541] env[68279]: WARNING nova.compute.manager [req-7e1daff4-9871-445b-beb5-bf135dadf83d req-47141a31-cf1e-400c-9c03-326be046957e service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received unexpected event network-vif-plugged-b892325d-c8dc-4176-b161-d5d789cd4e40 for instance with vm_state active and task_state None. [ 1107.996880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.068120] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963808, 'name': PowerOffVM_Task, 'duration_secs': 0.234174} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.068395] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1108.068570] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.068825] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96c03a97-bf9b-43c2-aacb-b5dcb2aafe8c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.096619] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1108.137862] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.137862] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.138026] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Deleting the datastore file [datastore2] fddf4cb7-cffb-41bb-9806-b8f69579cfef {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.138327] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-486e2969-f426-4943-883b-2e0aca84113b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.145362] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for the task: (returnval){ [ 1108.145362] env[68279]: value = "task-2963811" [ 1108.145362] env[68279]: _type = "Task" [ 1108.145362] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.153787] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.185403] env[68279]: DEBUG nova.scheduler.client.report [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.219645] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069477} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.219902] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.220678] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bae7fad-fde0-43c3-b908-04634d0ff041 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.243149] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 778efb81-2562-4d55-ace0-09722d92fa5b/778efb81-2562-4d55-ace0-09722d92fa5b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.243447] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4672cce2-8e0c-4670-b65b-e25f3bf73ef2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.263533] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1108.263533] env[68279]: value = "task-2963812" [ 1108.263533] env[68279]: _type = "Task" [ 1108.263533] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.273665] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.293577] env[68279]: DEBUG nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Successfully updated port: b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1108.419814] env[68279]: DEBUG nova.compute.manager [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Received event network-vif-deleted-57af1b7b-23d4-429e-b620-f1d918c431e5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.420084] env[68279]: DEBUG nova.compute.manager [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-changed-b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.420320] env[68279]: DEBUG nova.compute.manager [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing instance network info cache due to event network-changed-b892325d-c8dc-4176-b161-d5d789cd4e40. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.420560] env[68279]: DEBUG oslo_concurrency.lockutils [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.420728] env[68279]: DEBUG oslo_concurrency.lockutils [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.420938] env[68279]: DEBUG nova.network.neutron [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Refreshing network info cache for port b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.619788] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.655479] env[68279]: DEBUG oslo_vmware.api [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Task: {'id': task-2963811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.398781} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.655778] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.655970] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1108.656186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1108.656365] env[68279]: INFO nova.compute.manager [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1108.656605] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.656803] env[68279]: DEBUG nova.compute.manager [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1108.656896] env[68279]: DEBUG nova.network.neutron [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1108.690620] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.763s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.693753] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.022s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.695801] env[68279]: INFO nova.compute.claims [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.709976] env[68279]: INFO nova.scheduler.client.report [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance 525e4894-a8b1-45ae-a846-84ded8d97584 [ 1108.774744] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963812, 'name': ReconfigVM_Task, 'duration_secs': 0.347413} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.774986] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 778efb81-2562-4d55-ace0-09722d92fa5b/778efb81-2562-4d55-ace0-09722d92fa5b.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.775639] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67d1476f-cb80-4021-a242-bcdd01667f54 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.782519] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1108.782519] env[68279]: value = "task-2963813" [ 1108.782519] env[68279]: _type = "Task" [ 1108.782519] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.791094] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963813, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.796791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.143068] env[68279]: DEBUG nova.network.neutron [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Added VIF to instance network info cache for port b892325d-c8dc-4176-b161-d5d789cd4e40. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1109.143512] env[68279]: DEBUG nova.network.neutron [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b892325d-c8dc-4176-b161-d5d789cd4e40", "address": "fa:16:3e:2d:dd:eb", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb892325d-c8", "ovs_interfaceid": "b892325d-c8dc-4176-b161-d5d789cd4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.217465] env[68279]: DEBUG oslo_concurrency.lockutils [None req-062ad3ef-4474-4c7c-9c6c-8d84a265aeac tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "525e4894-a8b1-45ae-a846-84ded8d97584" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.973s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.293919] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963813, 
'name': Rename_Task, 'duration_secs': 0.152424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.294230] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.294482] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b395d6bd-054d-4313-9d82-035d344f9050 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.301656] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1109.301656] env[68279]: value = "task-2963814" [ 1109.301656] env[68279]: _type = "Task" [ 1109.301656] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.310477] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963814, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.402918] env[68279]: DEBUG nova.network.neutron [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.646808] env[68279]: DEBUG oslo_concurrency.lockutils [req-19ab7e5e-4c5f-47d0-b3da-99b52b96dce5 req-a111f118-5b1e-45c7-9871-86b75309f894 service nova] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.647318] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.647593] env[68279]: DEBUG nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.815144] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963814, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.889265] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd51a2a-0179-4a8b-bacf-e312558c2925 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.900132] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82aa421e-973b-4594-bd29-e8911042cfd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.905837] env[68279]: INFO nova.compute.manager [-] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Took 1.25 seconds to deallocate network for instance. [ 1109.944576] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ae3aaf-3ad1-4737-8995-d3d13e33a255 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.954237] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400d40c4-1127-4944-b11e-992cff535c5b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.970294] env[68279]: DEBUG nova.compute.provider_tree [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.187517] env[68279]: WARNING nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. ignoring it [ 1110.187730] env[68279]: WARNING nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. ignoring it [ 1110.187901] env[68279]: WARNING nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] b892325d-c8dc-4176-b161-d5d789cd4e40 already exists in list: port_ids containing: ['b892325d-c8dc-4176-b161-d5d789cd4e40']. 
ignoring it [ 1110.303442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.303608] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.324151] env[68279]: DEBUG oslo_vmware.api [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963814, 'name': PowerOnVM_Task, 'duration_secs': 0.611857} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.324493] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.324736] env[68279]: INFO nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Took 8.56 seconds to spawn the instance on the hypervisor. 
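The task-2963814 entries around this point trace oslo.vmware's task handling: PowerOnVM_Task is invoked through the vCenter session, _poll_task reports progress (0%, 89%), and wait_for_task returns once the task reaches a terminal state. Below is a minimal sketch of that pattern, assuming the stock oslo.vmware session API; the vCenter host, credentials and the managed object reference are placeholders, not values taken from this log.

# Sketch only: start a vCenter task and wait on it, as in the log entries above.
# Host, credentials and the moref value are illustrative placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',        # placeholder vCenter host
    'user@example.test',      # placeholder username
    'secret',                 # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)   # seconds between the progress polls seen above

# A real moref comes from a PropertyCollector lookup (the RetrievePropertiesEx
# calls throughout the log); this one is a hand-built placeholder.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)   # blocks until success, raises on error
print(task_info.state)

wait_for_task is the call that emits the "Waiting for the task ... progress is N%" lines and, on completion, the "completed successfully" entry.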
[ 1110.324958] env[68279]: DEBUG nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.325804] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f063029b-c575-43dd-a493-60511f4ff271 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.413601] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.473492] env[68279]: DEBUG nova.scheduler.client.report [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.482458] env[68279]: DEBUG nova.network.neutron [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b892325d-c8dc-4176-b161-d5d789cd4e40", "address": "fa:16:3e:2d:dd:eb", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb892325d-c8", "ovs_interfaceid": "b892325d-c8dc-4176-b161-d5d789cd4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.763244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "bfc3f843-3295-4381-8c9f-3bad711603fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.763516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.775303] env[68279]: DEBUG nova.compute.manager [req-18759295-cdce-474d-9718-7af78a525706 req-06780b78-4480-4445-a222-821d45376c9a service nova] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Received event network-vif-deleted-7cbc8ddd-efea-4295-9a79-1bced1aa3082 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.817832] env[68279]: INFO nova.compute.manager [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Detaching volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 [ 1110.845798] env[68279]: INFO nova.compute.manager [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Took 18.89 seconds to build instance. 
[ 1110.856434] env[68279]: INFO nova.virt.block_device [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Attempting to driver detach volume 5831cc8f-f303-46ba-a0ca-3334dbc1eeb4 from mountpoint /dev/sdb [ 1110.856632] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1110.856828] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1110.857735] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed4a60d-c591-4dff-9b45-71661e1ca44a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.881254] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b42036f-b209-4914-8e57-230619746674 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.888982] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d29676-1268-4e42-a09b-49ddb89b0caa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.911586] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ae3fac-0d29-40f7-9711-ea43962af18c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.930796] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] The volume has not been displaced from its original location: [datastore1] volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4/volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1110.936164] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1110.936520] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42fbb152-ddd6-4267-bb51-521493ae2ccf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.955763] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1110.955763] env[68279]: value = "task-2963815" [ 1110.955763] env[68279]: _type = "Task" [ 1110.955763] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.966867] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963815, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.983033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.983622] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.986358] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.990s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.986568] env[68279]: DEBUG nova.objects.instance [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lazy-loading 'resources' on Instance uuid 296358b1-e978-409c-8113-587ae8f806c7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.987881] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.988420] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.990073] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.990073] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6b9b5d-a425-4d64-a185-108fc58b1852 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.010641] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.010921] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.011137] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d 
tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.011344] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.011517] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.011732] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.011974] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.012216] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.012624] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.012624] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.012856] env[68279]: DEBUG nova.virt.hardware [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.020187] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfiguring VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1111.021320] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01f68570-ae87-4589-b588-ac0c5e556946 {{(pid=68279) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.040502] env[68279]: DEBUG oslo_vmware.api [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1111.040502] env[68279]: value = "task-2963816" [ 1111.040502] env[68279]: _type = "Task" [ 1111.040502] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.049183] env[68279]: DEBUG oslo_vmware.api [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963816, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.265560] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1111.346242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cdc2bea1-345b-43f6-85f9-25b90d0758e9 tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.403s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.466830] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963815, 'name': ReconfigVM_Task, 'duration_secs': 0.247499} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.467196] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1111.472261] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef8430a6-5b8a-4b80-94b4-a4a21034b628 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.489826] env[68279]: DEBUG nova.compute.utils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.491353] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1111.491353] env[68279]: value = "task-2963817" [ 1111.491353] env[68279]: _type = "Task" [ 1111.491353] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.491851] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.492067] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.506766] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.549466] env[68279]: DEBUG nova.policy [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71136053ec324086b94bc03ed7b649bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63e6688a23df4c48af9c2f37a97caeb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.559766] env[68279]: DEBUG oslo_vmware.api [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963816, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.703837] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1912dc8-b5a1-43cf-aa3e-1c44d51b2a6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.712572] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d8795b-2057-4f60-8f2f-d71fae3d7301 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.752716] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a263e0b4-2bec-44df-a401-bc9770aa6abf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.761732] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1e5e72-82e6-4675-96a6-2b755a172d80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.781437] env[68279]: DEBUG nova.compute.provider_tree [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.797285] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1111.798167] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfde1d5-c833-4c92-92b4-ee7df759836b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.801397] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.806146] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1111.806322] env[68279]: ERROR oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk due to incomplete transfer. 
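The instance_info_cache written above for instance f6a65d1b-ba9c-44b7-b9aa-815cabd45176 is a list of VIF dicts, one per Neutron port, each carrying the port id, MAC address and per-subnet fixed and floating IPs. The following short sketch walks that structure and prints the addresses per port; the data is trimmed to just the fields the loop reads, with values copied from the cache entry logged above.

# Walk a network_info cache shaped like the one logged above.
nw_info = [
    {"id": "7243843a-c48e-44d5-990f-1de0a9191cbd",
     "address": "fa:16:3e:2a:d5:40",
     "network": {"subnets": [{"cidr": "192.168.128.0/28",
                              "ips": [{"address": "192.168.128.9",
                                       "floating_ips": [{"address": "10.180.180.220"}]}]}]}},
    {"id": "b892325d-c8dc-4176-b161-d5d789cd4e40",
     "address": "fa:16:3e:2d:dd:eb",
     "network": {"subnets": [{"cidr": "192.168.128.0/28",
                              "ips": [{"address": "192.168.128.14",
                                       "floating_ips": []}]}]}},
]

for vif in nw_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip["floating_ips"]]
            print(vif["id"], vif["address"], ip["address"], floating or "-")

This is why the second ReconfigVM_Task (task-2963816) appears for that instance: the AttachInterfacesTestJSON run adds port b892325d-c8dc-4176-b161-d5d789cd4e40 as a second VIF on the same network c0150443-bd24-4fb2-8b27-b118d22250c3, which is also what triggers the "already exists in list" warnings earlier.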
[ 1111.806547] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-13077aa5-41b9-494d-aea4-c0daa5be2301 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.813821] env[68279]: DEBUG oslo_vmware.rw_handles [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5215106d-1095-48a6-d11e-ae9210f506f7/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1111.814125] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Uploaded image 4f678577-45a3-48b3-b6bb-321a68ff5e6e to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1111.816157] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1111.816415] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-15b0f1e9-32db-44ce-a00a-00b31c9e69ca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.822266] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1111.822266] env[68279]: value = "task-2963818" [ 1111.822266] env[68279]: _type = "Task" [ 1111.822266] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.830951] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963818, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.851595] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Successfully created port: 03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1111.993332] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.006965] env[68279]: DEBUG oslo_vmware.api [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963817, 'name': ReconfigVM_Task, 'duration_secs': 0.147114} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.007382] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594716', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'name': 'volume-5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50f390b2-99b7-49f3-997f-7d7b50cff9f2', 'attached_at': '', 'detached_at': '', 'volume_id': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4', 'serial': '5831cc8f-f303-46ba-a0ca-3334dbc1eeb4'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1112.051782] env[68279]: DEBUG oslo_vmware.api [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963816, 'name': ReconfigVM_Task, 'duration_secs': 0.779123} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.052676] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.052676] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfigured VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1112.285886] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "778efb81-2562-4d55-ace0-09722d92fa5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.286194] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.286407] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f 
tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.286590] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.286768] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.288932] env[68279]: DEBUG nova.scheduler.client.report [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.293093] env[68279]: INFO nova.compute.manager [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Terminating instance [ 1112.332400] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963818, 'name': Destroy_Task, 'duration_secs': 0.42226} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.332650] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Destroyed the VM [ 1112.332905] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1112.333167] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a813e862-9581-4bee-855f-41256250ee5d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.340213] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1112.340213] env[68279]: value = "task-2963819" [ 1112.340213] env[68279]: _type = "Task" [ 1112.340213] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.347519] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963819, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.553816] env[68279]: DEBUG nova.objects.instance [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'flavor' on Instance uuid 50f390b2-99b7-49f3-997f-7d7b50cff9f2 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.557153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a51fd6f3-66f4-4593-8eaf-ea5d29a2be8d tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.917s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.793991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.798631] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.179s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.800257] env[68279]: INFO nova.compute.claims [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.803357] env[68279]: DEBUG nova.compute.manager [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1112.803580] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.804761] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e998b11-541f-481d-82ae-499d2dc32bb7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.812174] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.812411] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6c3f0aa-8411-4228-b29a-9b5c36e9617c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.820091] env[68279]: INFO nova.scheduler.client.report [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Deleted allocations for instance 296358b1-e978-409c-8113-587ae8f806c7 [ 1112.821443] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1112.821443] env[68279]: value = "task-2963820" [ 1112.821443] env[68279]: _type = "Task" [ 1112.821443] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.832782] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.850698] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963819, 'name': RemoveSnapshot_Task, 'duration_secs': 0.45149} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.851110] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1112.851402] env[68279]: DEBUG nova.compute.manager [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.852353] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6876184c-b05d-4474-9445-98382cb8dc08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.007998] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.035989] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.036279] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.036440] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.036622] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.036767] env[68279]: DEBUG nova.virt.hardware [None 
req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.036914] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.037142] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.037312] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.037481] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.037641] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.037814] env[68279]: DEBUG nova.virt.hardware [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.038697] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec1f962-02f8-4496-ae01-932c8bdc8aef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.047082] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a0d534-0f50-4e2b-a89f-da7e232e447d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.282839] env[68279]: DEBUG nova.compute.manager [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Received event network-vif-plugged-03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.282839] env[68279]: DEBUG oslo_concurrency.lockutils [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.283339] env[68279]: DEBUG oslo_concurrency.lockutils [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.283545] env[68279]: DEBUG oslo_concurrency.lockutils [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.283725] env[68279]: DEBUG nova.compute.manager [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] No waiting events found dispatching network-vif-plugged-03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1113.283962] env[68279]: WARNING nova.compute.manager [req-01674bbf-89bb-4bd9-b7aa-a725b4994015 req-8d0f7bd1-9e63-4670-8ae6-621c179f462d service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Received unexpected event network-vif-plugged-03be8849-6f9f-415f-b7fb-ccc79a5734fd for instance with vm_state building and task_state spawning. [ 1113.332411] env[68279]: DEBUG oslo_concurrency.lockutils [None req-0ee37b2a-0b80-4052-a301-61059e659d2c tempest-ServerAddressesTestJSON-1805717403 tempest-ServerAddressesTestJSON-1805717403-project-member] Lock "296358b1-e978-409c-8113-587ae8f806c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.881s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.338634] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963820, 'name': PowerOffVM_Task, 'duration_secs': 0.187604} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.338890] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1113.339065] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1113.339326] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db631bcf-0536-4ae3-917f-8c20d532203b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.367292] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Successfully updated port: 03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1113.368728] env[68279]: INFO nova.compute.manager [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Shelve offloading [ 1113.411207] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1113.411207] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1113.411207] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleting the datastore file [datastore1] 778efb81-2562-4d55-ace0-09722d92fa5b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1113.411388] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acce8b57-1a8a-47b2-834a-6f78e5a5dd9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.419736] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for the task: (returnval){ [ 1113.419736] env[68279]: value = "task-2963822" [ 1113.419736] env[68279]: _type = "Task" [ 1113.419736] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.428599] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963822, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.567116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b96ef98a-7216-4c5e-b335-7a73a086f974 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.263s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.872219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.872219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.872650] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.876430] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1113.877304] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e35d99c-7161-4798-9289-042fad127850 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.884170] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1113.884170] env[68279]: value = "task-2963823" [ 1113.884170] env[68279]: _type = "Task" [ 1113.884170] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.896460] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1113.896460] env[68279]: DEBUG nova.compute.manager [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.897162] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8c6700-798d-4c69-9e75-4f967973b1d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.905333] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.905505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.905676] env[68279]: DEBUG nova.network.neutron [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.929461] env[68279]: DEBUG oslo_vmware.api [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Task: {'id': task-2963822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163412} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.929862] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.930219] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.930484] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.930776] env[68279]: INFO nova.compute.manager [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1113.931203] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.934132] env[68279]: DEBUG nova.compute.manager [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.934260] env[68279]: DEBUG nova.network.neutron [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.995367] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de48371-716a-45f6-bc39-3293026c81cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.003284] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef058d5-ea7a-4396-9448-fae49eeb5c17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.034973] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.035198] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.035392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.035561] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.035781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.037801] env[68279]: INFO nova.compute.manager [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 
tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Terminating instance [ 1114.039976] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7daf5fd2-8df0-4ece-8b03-b3e33a4a623d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.045597] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-b892325d-c8dc-4176-b161-d5d789cd4e40" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.045791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-b892325d-c8dc-4176-b161-d5d789cd4e40" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.055088] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78963bed-0c47-438e-ab9a-c825ae16a713 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.068465] env[68279]: DEBUG nova.compute.provider_tree [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.408533] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1114.548656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.548864] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.549838] env[68279]: DEBUG nova.compute.manager [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.550268] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.551176] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f193dcf-699f-4d7b-8e6e-a40a063a7d97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.555871] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5fa89d-e024-4812-9ad6-933023b70828 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.577391] env[68279]: DEBUG nova.scheduler.client.report [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.581289] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.586937] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecd65f5-8004-45d5-a490-d1cfc477bf36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.589385] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a186bd89-fa9b-476a-bf03-0e9dfb21f86c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.619913] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfiguring VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1114.621860] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c36208c1-cd38-4e00-bfc5-51e7f7e2e060 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.636556] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1114.636556] env[68279]: value = 
"task-2963824" [ 1114.636556] env[68279]: _type = "Task" [ 1114.636556] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.644470] env[68279]: DEBUG nova.network.neutron [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating instance_info_cache with network_info: [{"id": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "address": "fa:16:3e:da:6e:96", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03be8849-6f", "ovs_interfaceid": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.647160] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1114.647160] env[68279]: value = "task-2963825" [ 1114.647160] env[68279]: _type = "Task" [ 1114.647160] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.655503] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.662122] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.748718] env[68279]: DEBUG nova.network.neutron [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.816531] env[68279]: DEBUG nova.network.neutron [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.083494] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.083917] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1115.086834] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.673s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.087076] env[68279]: DEBUG nova.objects.instance [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lazy-loading 'resources' on Instance uuid fddf4cb7-cffb-41bb-9806-b8f69579cfef {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.150628] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963824, 'name': PowerOffVM_Task, 'duration_secs': 0.174224} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.153744] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.153942] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.154195] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a29e0c0-9041-4dc9-8c16-f6192e463ae0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.156105] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.156525] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Instance network_info: |[{"id": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "address": "fa:16:3e:da:6e:96", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03be8849-6f", "ovs_interfaceid": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1115.156764] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:6e:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03be8849-6f9f-415f-b7fb-ccc79a5734fd', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1115.164091] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.167894] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1115.168167] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.168370] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e122ef61-24a0-43d8-b449-bd2c779c85de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.187818] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1115.187818] env[68279]: value = "task-2963827" [ 1115.187818] env[68279]: _type = "Task" [ 1115.187818] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.195400] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963827, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.231039] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.231283] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.231434] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore2] 50f390b2-99b7-49f3-997f-7d7b50cff9f2 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.231706] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-076a353d-e666-4cbf-b665-dc3012887057 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.238942] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1115.238942] env[68279]: value = "task-2963828" [ 1115.238942] env[68279]: _type = "Task" [ 1115.238942] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.248038] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.251451] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.315926] env[68279]: DEBUG nova.compute.manager [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Received event network-changed-03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.316106] env[68279]: DEBUG nova.compute.manager [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Refreshing instance network info cache due to event network-changed-03be8849-6f9f-415f-b7fb-ccc79a5734fd. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.316469] env[68279]: DEBUG oslo_concurrency.lockutils [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] Acquiring lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.316678] env[68279]: DEBUG oslo_concurrency.lockutils [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] Acquired lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.316909] env[68279]: DEBUG nova.network.neutron [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Refreshing network info cache for port 03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.323210] env[68279]: INFO nova.compute.manager [-] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Took 1.39 seconds to deallocate network for instance. [ 1115.590281] env[68279]: DEBUG nova.compute.utils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1115.594563] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1115.594739] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1115.597159] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.598419] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d29a2b-bce2-4f26-afbb-08dad9ff3272 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.607157] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.607397] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2bf38b9-f23e-4072-b382-e8b0bb71b695 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.637219] env[68279]: DEBUG nova.policy [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4d9c39cb0a142eab4370307dd41cf0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd517424aba641e4b867e440ba0ee7ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1115.659866] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.670389] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.670517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.671158] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.671158] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62466ad0-4246-435f-bd16-2d004d2256de {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.678753] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1115.678753] env[68279]: value = "task-2963830" [ 1115.678753] env[68279]: _type = "Task" [ 1115.678753] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.688733] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.699248] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963827, 'name': CreateVM_Task, 'duration_secs': 0.307818} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.699434] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1115.700135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.700324] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.700657] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1115.700934] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b4cad2d-6008-4153-9874-43ee5e5178bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.707569] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1115.707569] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523bc2c9-6b3f-4326-c645-dd53c773abe5" [ 1115.707569] env[68279]: _type = "Task" [ 1115.707569] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.715682] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523bc2c9-6b3f-4326-c645-dd53c773abe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.752813] env[68279]: DEBUG oslo_vmware.api [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.376699} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.753213] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.753291] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.753581] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.753655] env[68279]: INFO nova.compute.manager [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1115.754379] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.754379] env[68279]: DEBUG nova.compute.manager [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.754379] env[68279]: DEBUG nova.network.neutron [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.784017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39c0540-dca1-4c68-8600-dbcdbb8de945 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.791320] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8c64ac-e0bc-4d06-81c0-ade957c29c76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.826206] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382e53cc-657d-4bac-bc27-8d6b238fe9ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.833566] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.835885] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0dc335-5050-4a95-b59d-14072b4fc933 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.852421] env[68279]: DEBUG nova.compute.provider_tree [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.040960] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Successfully created port: 633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1116.098803] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1116.107142] env[68279]: DEBUG nova.network.neutron [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updated VIF entry in instance network info cache for port 03be8849-6f9f-415f-b7fb-ccc79a5734fd. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.107142] env[68279]: DEBUG nova.network.neutron [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating instance_info_cache with network_info: [{"id": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "address": "fa:16:3e:da:6e:96", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03be8849-6f", "ovs_interfaceid": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.161188] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.189177] env[68279]: DEBUG oslo_vmware.api [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134366} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.189422] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.189602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.189775] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.218070] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523bc2c9-6b3f-4326-c645-dd53c773abe5, 'name': SearchDatastore_Task, 'duration_secs': 0.018045} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.218358] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.218587] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.218820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.218960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.219147] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1116.219406] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0111ff2e-b0b4-47d0-ad08-e2bed546ded0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.223777] env[68279]: INFO nova.scheduler.client.report [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted allocations for instance dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba [ 1116.228208] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1116.228383] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1116.229159] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbbbef0-f8c6-410c-a236-1810946440ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.235467] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1116.235467] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52afdc39-9833-e585-1a66-f8662cee8ed4" [ 1116.235467] env[68279]: _type = "Task" [ 1116.235467] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.249909] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52afdc39-9833-e585-1a66-f8662cee8ed4, 'name': SearchDatastore_Task, 'duration_secs': 0.009534} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.256037] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c9efa7-615f-4b22-87c6-489ad08c209d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.258993] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1116.258993] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480ac0-1441-906f-4da1-c8a6c7eecf4f" [ 1116.258993] env[68279]: _type = "Task" [ 1116.258993] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.268858] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480ac0-1441-906f-4da1-c8a6c7eecf4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.358218] env[68279]: DEBUG nova.scheduler.client.report [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.615359] env[68279]: DEBUG oslo_concurrency.lockutils [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] Releasing lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.616081] env[68279]: DEBUG nova.compute.manager [req-ad266a29-a3e0-40d3-982a-360db739f521 req-527d4d6e-e24e-4946-9d64-66eab888596c service nova] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Received event network-vif-deleted-99b8877f-8923-4dc7-8f41-91034ef1aa8f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.665342] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.727872] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.769617] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52480ac0-1441-906f-4da1-c8a6c7eecf4f, 'name': SearchDatastore_Task, 'duration_secs': 0.011206} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.769966] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.770154] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 11bbfd41-52bb-410c-b368-1473a309d6a7/11bbfd41-52bb-410c-b368-1473a309d6a7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1116.770415] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06d814b1-7367-49e6-b2eb-351cf6496002 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.777845] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1116.777845] env[68279]: value = "task-2963831" [ 1116.777845] env[68279]: _type = "Task" [ 1116.777845] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.786053] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963831, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.849441] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "7c9c6661-2e52-4dba-8671-26f69d089903" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.850306] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.864174] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.777s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.866623] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.065s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.869058] env[68279]: INFO nova.compute.claims [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.889421] env[68279]: INFO nova.scheduler.client.report [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Deleted allocations for instance fddf4cb7-cffb-41bb-9806-b8f69579cfef [ 1116.991615] env[68279]: DEBUG nova.network.neutron [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.117540] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1117.153619] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.154016] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.154305] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.154589] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.154864] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.155186] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.155508] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.155802] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.156196] env[68279]: DEBUG nova.virt.hardware [None 
req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.156409] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.156697] env[68279]: DEBUG nova.virt.hardware [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.157573] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.157851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.160270] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612e4636-e7fb-4724-af1d-6ecd596c4449 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.183904] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8beec0ae-6b04-4902-a1a9-a1cbd4e20fbf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.191166] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.290123] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963831, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.350511] env[68279]: DEBUG nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-vif-unplugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.350511] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.350511] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.350669] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.350807] env[68279]: DEBUG nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] No waiting events found dispatching network-vif-unplugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1117.350977] env[68279]: WARNING nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received unexpected event network-vif-unplugged-343369ce-f2d1-401a-9a78-b72854001a75 for instance with vm_state shelved_offloaded and task_state None. [ 1117.351213] env[68279]: DEBUG nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-changed-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.351371] env[68279]: DEBUG nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing instance network info cache due to event network-changed-343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.351546] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.351869] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.351869] env[68279]: DEBUG nova.network.neutron [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.353520] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.399017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c612e89e-7a67-4bb0-a1ab-99e9fed657e9 tempest-ServerAddressesNegativeTestJSON-976579573 tempest-ServerAddressesNegativeTestJSON-976579573-project-member] Lock "fddf4cb7-cffb-41bb-9806-b8f69579cfef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.367s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.494115] env[68279]: INFO nova.compute.manager [-] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Took 1.74 seconds to deallocate network for instance. [ 1117.665104] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.675958] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.791082] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600252} completed successfully. 
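The CopyVirtualDisk_Task just logged follows the same invoke-then-poll pattern as every *_Task call in this trace: a vSphere method that returns a task reference is invoked through the session, and wait_for_task() polls it until it completes (the recurring "_poll_task ... progress is N%" and "completed successfully" lines). Below is a minimal, hedged sketch of that pattern using the public oslo.vmware API; the vCenter endpoint, credentials, and datastore paths are placeholders, and the trimmed CopyVirtualDisk_Task arguments are illustrative rather than the exact call made by nova.virt.vmwareapi.vm_util.

# Minimal sketch of the invoke-then-poll pattern seen in this trace.
# Endpoint, credentials and datastore paths are placeholders; the real call
# in nova.virt.vmwareapi.vm_util passes a fuller argument set.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',      # placeholder vCenter host/credentials
    api_retry_count=3, task_poll_interval=0.5)

# *_Task methods return a task moref; wait_for_task() polls it until it
# succeeds (producing the "progress is N%" lines) or raises on failure.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/IMAGE_ID/IMAGE_ID.vmdk',
    destName='[datastore1] INSTANCE_UUID/INSTANCE_UUID.vmdk')
session.wait_for_task(task)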
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.791935] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Successfully updated port: 633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1117.793058] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 11bbfd41-52bb-410c-b368-1473a309d6a7/11bbfd41-52bb-410c-b368-1473a309d6a7.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1117.793277] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1117.793524] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f208aae7-4bd6-4324-97df-1447c3dc9ea6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.801221] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1117.801221] env[68279]: value = "task-2963832" [ 1117.801221] env[68279]: _type = "Task" [ 1117.801221] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.811880] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963832, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.882227] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.005023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.056988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4960ed-ac19-41e9-8ab6-82f1ded422c4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.067190] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aba95f-d65c-41d6-9901-cca9a84c5f93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.099098] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f78f980-73bf-4e48-aebf-3f6e693cb809 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.102209] env[68279]: DEBUG nova.network.neutron [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updated VIF entry in instance network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.102478] env[68279]: DEBUG nova.network.neutron [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap343369ce-f2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.108997] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8f431c-7d45-46ae-924c-deaa9813acf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.124719] env[68279]: DEBUG nova.compute.provider_tree [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.173613] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.198097] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.296884] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.297084] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.297243] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.313826] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075289} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.314742] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1118.315544] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366bfe99-52ce-42eb-b91d-e2a1768c6595 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.343140] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 11bbfd41-52bb-410c-b368-1473a309d6a7/11bbfd41-52bb-410c-b368-1473a309d6a7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.343736] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10a5ef87-1aff-4ab3-829d-d6d14b4bf24b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.363966] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1118.363966] env[68279]: value = "task-2963833" [ 1118.363966] env[68279]: _type = "Task" [ 1118.363966] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.373247] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963833, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.609221] env[68279]: DEBUG oslo_concurrency.lockutils [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.609466] env[68279]: DEBUG nova.compute.manager [req-08f91caf-5ff3-4277-a41c-8c37cb423d27 req-a6ff18d6-ef2a-4b8c-a855-c218b405c348 service nova] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Received event network-vif-deleted-5959e66b-7a16-41ba-8c1b-adbc5941455e {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.633751] env[68279]: DEBUG nova.scheduler.client.report [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.637042] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.677086] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.847426] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1118.877316] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963833, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.005930] env[68279]: DEBUG nova.network.neutron [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.138930] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.139482] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1119.142280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.309s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.142997] env[68279]: DEBUG nova.objects.instance [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lazy-loading 'resources' on Instance uuid 778efb81-2562-4d55-ace0-09722d92fa5b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.172474] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. 
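The "Acquiring lock 'compute_resources'" / "Lock ... 'released' ... held N.NNNs" messages above come from oslo.concurrency's lockutils, which Nova uses to serialize the resource tracker's claim and usage updates. A minimal sketch of the same primitive follows; the lock name is taken from the log, while the guarded bodies are purely illustrative.

# Minimal sketch of the locking pattern behind the "Acquiring lock
# 'compute_resources'" lines above. Bodies are illustrative only.
from oslo_concurrency import lockutils

# Context-manager form: logs acquire/release at DEBUG, as seen in the trace.
with lockutils.lock('compute_resources'):
    pass  # e.g. adjust tracked resource usage while holding the lock

# Decorator form, comparable to how ResourceTracker methods such as
# instance_claim() and update_usage() appear in the lock messages above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # illustrative body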
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.375496] env[68279]: DEBUG nova.compute.manager [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Received event network-vif-plugged-633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.375662] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.375938] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.376173] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.376299] env[68279]: DEBUG nova.compute.manager [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] No waiting events found dispatching network-vif-plugged-633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1119.376463] env[68279]: WARNING nova.compute.manager [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Received unexpected event network-vif-plugged-633e0597-c529-4822-bb62-9eb5fe78047c for instance with vm_state building and task_state spawning. [ 1119.376619] env[68279]: DEBUG nova.compute.manager [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Received event network-changed-633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.376770] env[68279]: DEBUG nova.compute.manager [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Refreshing instance network info cache due to event network-changed-633e0597-c529-4822-bb62-9eb5fe78047c. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1119.376934] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Acquiring lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.380639] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963833, 'name': ReconfigVM_Task, 'duration_secs': 0.595073} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.380890] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 11bbfd41-52bb-410c-b368-1473a309d6a7/11bbfd41-52bb-410c-b368-1473a309d6a7.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1119.381471] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a28bcaf6-e79a-48d1-a5ae-0e64476cda49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.388559] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1119.388559] env[68279]: value = "task-2963834" [ 1119.388559] env[68279]: _type = "Task" [ 1119.388559] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.395727] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963834, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.508317] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.508663] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Instance network_info: |[{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1119.508988] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Acquired lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.509196] env[68279]: DEBUG nova.network.neutron [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Refreshing network info cache for port 633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1119.510526] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:15:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '633e0597-c529-4822-bb62-9eb5fe78047c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1119.518620] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 
tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1119.522178] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1119.522178] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f961810a-bb59-4d8f-8ff3-6cd88b066c7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.541517] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1119.541517] env[68279]: value = "task-2963835" [ 1119.541517] env[68279]: _type = "Task" [ 1119.541517] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.551390] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963835, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.646047] env[68279]: DEBUG nova.compute.utils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1119.650815] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1119.650815] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1119.676899] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.718154] env[68279]: DEBUG nova.policy [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1119.806116] env[68279]: DEBUG nova.network.neutron [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updated VIF entry in instance network info cache for port 633e0597-c529-4822-bb62-9eb5fe78047c. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.806511] env[68279]: DEBUG nova.network.neutron [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.840413] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad9c28b-4cd9-4b41-ba72-9dd39d87e44d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.848821] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f0c65e-b774-4173-bc14-db4aa4415331 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.880687] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c50ea2e-96a6-4354-ab85-617a5f1360f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1119.888310] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9548c3-b6d0-44ba-a2a8-dc3457616c27 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.905247] env[68279]: DEBUG nova.compute.provider_tree [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.910922] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963834, 'name': Rename_Task, 'duration_secs': 0.142859} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.910922] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1119.911055] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5780be5-390b-4760-89d7-9e054ec429d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.917312] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1119.917312] env[68279]: value = "task-2963836" [ 1119.917312] env[68279]: _type = "Task" [ 1119.917312] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.928163] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.042560] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Successfully created port: 3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1120.054406] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963835, 'name': CreateVM_Task, 'duration_secs': 0.392018} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.054614] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1120.055284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.055468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.055777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1120.056044] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79509089-f736-4627-8d8f-fc27efa45158 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.060356] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1120.060356] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ce3b0-6b9b-6947-27c7-6cab527daee0" [ 1120.060356] env[68279]: _type = "Task" [ 1120.060356] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.068090] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ce3b0-6b9b-6947-27c7-6cab527daee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.151058] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1120.172235] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.310171] env[68279]: DEBUG oslo_concurrency.lockutils [req-9fe0c9f9-da4f-4c4b-bbd3-6b6b41ce6d63 req-99a165dd-b734-48e3-b27a-2c76b6e72365 service nova] Releasing lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.411914] env[68279]: DEBUG nova.scheduler.client.report [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.430379] env[68279]: DEBUG oslo_vmware.api [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963836, 'name': PowerOnVM_Task, 'duration_secs': 0.451101} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.431246] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1120.431454] env[68279]: INFO nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Took 7.42 seconds to spawn the instance on the hypervisor. [ 1120.431633] env[68279]: DEBUG nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.432491] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa30eec-2ff4-43ba-a7cc-d62c4f69d722 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.570260] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525ce3b0-6b9b-6947-27c7-6cab527daee0, 'name': SearchDatastore_Task, 'duration_secs': 0.009899} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.570618] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.570875] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1120.571125] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.571273] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.571448] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.571699] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3695a16-9778-4feb-9e0b-9821640a6927 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.580459] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.580459] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1120.581168] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b388dab-1fca-44e7-8ad2-5d567cb96eee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.585922] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1120.585922] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52826129-205a-78ca-c7ab-40c85fcf3843" [ 1120.585922] env[68279]: _type = "Task" [ 1120.585922] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.593100] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52826129-205a-78ca-c7ab-40c85fcf3843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.672402] env[68279]: DEBUG oslo_vmware.api [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963825, 'name': ReconfigVM_Task, 'duration_secs': 5.746295} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.672642] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.672852] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Reconfigured VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1120.920440] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.922781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.195s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.923028] env[68279]: DEBUG nova.objects.instance [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 
tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'resources' on Instance uuid dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.948921] env[68279]: INFO nova.scheduler.client.report [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Deleted allocations for instance 778efb81-2562-4d55-ace0-09722d92fa5b [ 1120.956067] env[68279]: INFO nova.compute.manager [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Took 13.33 seconds to build instance. [ 1121.098288] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52826129-205a-78ca-c7ab-40c85fcf3843, 'name': SearchDatastore_Task, 'duration_secs': 0.008181} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.099156] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ade08fab-8fa1-4f56-aaf2-ad1e6687e20f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.105632] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1121.105632] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b997f7-9193-090c-dd4a-f4f7896abbfb" [ 1121.105632] env[68279]: _type = "Task" [ 1121.105632] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.113704] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b997f7-9193-090c-dd4a-f4f7896abbfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.163069] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1121.190939] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1121.190939] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1121.190939] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1121.190939] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1121.190939] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 
tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1121.192559] env[68279]: DEBUG nova.virt.hardware [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1121.192854] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89691df5-24ee-4874-aa1c-20436f279bf3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.200962] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48fb2c3-3111-416d-9e97-b9c9b0a5501f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.427677] env[68279]: DEBUG nova.objects.instance [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'numa_topology' on Instance uuid dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.441071] env[68279]: DEBUG nova.compute.manager [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-vif-deleted-b892325d-c8dc-4176-b161-d5d789cd4e40 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.441071] env[68279]: INFO nova.compute.manager [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Neutron deleted interface b892325d-c8dc-4176-b161-d5d789cd4e40; detaching it from the instance and deleting it from the info cache [ 1121.442113] env[68279]: DEBUG nova.network.neutron [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.458295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f7c0b5ef-bbff-4fbe-988f-2ea9f88aeb5f tempest-ServerDiskConfigTestJSON-901511608 tempest-ServerDiskConfigTestJSON-901511608-project-member] Lock "778efb81-2562-4d55-ace0-09722d92fa5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.171s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.459122] env[68279]: DEBUG oslo_concurrency.lockutils [None req-55c973d5-972e-4a17-9489-2030b946b499 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.843s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.583695] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Successfully updated port: 3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1121.618143] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b997f7-9193-090c-dd4a-f4f7896abbfb, 'name': SearchDatastore_Task, 'duration_secs': 0.011053} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.619274] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.619618] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1121.620792] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2730c5ab-484a-485e-b605-cf22b1c9bfb2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.626857] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1121.626857] env[68279]: value = "task-2963837" [ 1121.626857] env[68279]: _type = "Task" [ 1121.626857] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.635173] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963837, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.798996] env[68279]: DEBUG nova.compute.manager [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Received event network-changed-03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.799203] env[68279]: DEBUG nova.compute.manager [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Refreshing instance network info cache due to event network-changed-03be8849-6f9f-415f-b7fb-ccc79a5734fd. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.799470] env[68279]: DEBUG oslo_concurrency.lockutils [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] Acquiring lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.799647] env[68279]: DEBUG oslo_concurrency.lockutils [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] Acquired lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.799976] env[68279]: DEBUG nova.network.neutron [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Refreshing network info cache for port 03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.844258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.844258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.844492] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.844863] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.845061] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.847351] env[68279]: INFO nova.compute.manager [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 
tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Terminating instance [ 1121.886639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.886896] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.887025] env[68279]: DEBUG nova.network.neutron [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.929871] env[68279]: DEBUG nova.objects.base [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1121.948618] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] Acquiring lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.087812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.088018] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.088182] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1122.137983] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499853} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.140690] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1122.140919] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1122.141372] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b74a493-1880-4b4f-9e78-71d94f80548e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.149931] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1122.149931] env[68279]: value = "task-2963838" [ 1122.149931] env[68279]: _type = "Task" [ 1122.149931] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.157775] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3e98cc-80d5-47e8-8da4-673138c0ea56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.164702] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963838, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.171192] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334b85ef-4538-4939-811f-d4ad0f54b45e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.202975] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a73ceee-faeb-450c-b9f3-198362d451ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.211096] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07547ca-1efc-4c99-bf9b-29e89a74caaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.226647] env[68279]: DEBUG nova.compute.provider_tree [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.350824] env[68279]: DEBUG nova.compute.manager [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.351074] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.352087] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3678e46-1843-45ce-878e-3560a2ac51be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.359997] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.360271] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-262a2c4b-82ef-4912-8395-30dde5b2ec28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.367344] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1122.367344] env[68279]: value = "task-2963839" [ 1122.367344] env[68279]: _type = "Task" [ 1122.367344] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.376682] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963839, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.642507] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1122.660608] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963838, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074013} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.660944] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1122.663597] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0c1c34-7656-4968-9d83-3f6b03f3786c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.686921] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.689843] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71ecb853-bba9-4a60-be26-e438201a5114 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.712901] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1122.712901] env[68279]: value = "task-2963840" [ 1122.712901] env[68279]: _type = "Task" [ 1122.712901] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.726927] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963840, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.729834] env[68279]: DEBUG nova.scheduler.client.report [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.746623] env[68279]: DEBUG nova.network.neutron [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updated VIF entry in instance network info cache for port 03be8849-6f9f-415f-b7fb-ccc79a5734fd. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.746974] env[68279]: DEBUG nova.network.neutron [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating instance_info_cache with network_info: [{"id": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "address": "fa:16:3e:da:6e:96", "network": {"id": "1097f6ad-3c5c-4c74-993b-79f1ec7b22d4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-267180150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63e6688a23df4c48af9c2f37a97caeb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03be8849-6f", "ovs_interfaceid": "03be8849-6f9f-415f-b7fb-ccc79a5734fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.815275] env[68279]: DEBUG nova.network.neutron [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Updating instance_info_cache with network_info: [{"id": "3d031948-681f-43fd-91d7-c88b8b59d70c", "address": "fa:16:3e:1f:62:0b", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d031948-68", "ovs_interfaceid": "3d031948-681f-43fd-91d7-c88b8b59d70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.836919] env[68279]: DEBUG nova.network.neutron [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [{"id": "7243843a-c48e-44d5-990f-1de0a9191cbd", "address": "fa:16:3e:2a:d5:40", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7243843a-c4", "ovs_interfaceid": "7243843a-c48e-44d5-990f-1de0a9191cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.877349] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963839, 'name': PowerOffVM_Task, 'duration_secs': 0.245594} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.877605] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.877770] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.878057] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd47d287-af34-44a8-a874-e2fd588064ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.949186] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.949447] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.949610] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleting the datastore file [datastore2] f6a65d1b-ba9c-44b7-b9aa-815cabd45176 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.950042] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9480d72-b009-4b54-b432-df85efc38d73 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.957011] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1122.957011] env[68279]: value = "task-2963842" [ 1122.957011] env[68279]: _type = "Task" [ 1122.957011] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.965779] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963842, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.226435] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963840, 'name': ReconfigVM_Task, 'duration_secs': 0.291546} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.226711] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1123.227355] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14ac3f0c-54e0-49cd-aecd-3b5a43771ce4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.235019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.312s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.237696] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1123.237696] env[68279]: value = "task-2963843" [ 1123.237696] env[68279]: _type = "Task" [ 1123.237696] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.237897] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.356s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.239298] env[68279]: INFO nova.compute.claims [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.252579] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963843, 'name': Rename_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.253064] env[68279]: DEBUG oslo_concurrency.lockutils [req-ff718946-06a7-494f-995f-3d974b323f6c req-35deeb6b-3fb1-4dc4-8ce1-868cca7630cc service nova] Releasing lock "refresh_cache-11bbfd41-52bb-410c-b368-1473a309d6a7" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.317812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.318151] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Instance network_info: |[{"id": "3d031948-681f-43fd-91d7-c88b8b59d70c", "address": "fa:16:3e:1f:62:0b", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d031948-68", "ovs_interfaceid": "3d031948-681f-43fd-91d7-c88b8b59d70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1123.318899] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:62:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d031948-681f-43fd-91d7-c88b8b59d70c', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1123.327098] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1123.327324] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1123.327660] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dcc86eb-6a50-4f5b-a1be-7df56a1aaf43 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.344051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.352909] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1123.352909] env[68279]: value = "task-2963844" [ 1123.352909] env[68279]: _type = "Task" [ 1123.352909] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.362396] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963844, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.468832] env[68279]: DEBUG oslo_vmware.api [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177952} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.469690] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.469969] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.470192] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.470408] env[68279]: INFO nova.compute.manager [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1123.470655] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1123.472055] env[68279]: DEBUG nova.compute.manager [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1123.472163] env[68279]: DEBUG nova.network.neutron [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.474768] env[68279]: DEBUG nova.compute.manager [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Received event network-vif-plugged-3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1123.474971] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Acquiring lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.475191] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.475355] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.475516] env[68279]: DEBUG nova.compute.manager [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] No waiting events found dispatching network-vif-plugged-3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1123.475679] env[68279]: WARNING nova.compute.manager [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Received unexpected event network-vif-plugged-3d031948-681f-43fd-91d7-c88b8b59d70c for instance with vm_state building and task_state spawning. 
[ 1123.475836] env[68279]: DEBUG nova.compute.manager [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Received event network-changed-3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1123.475986] env[68279]: DEBUG nova.compute.manager [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Refreshing instance network info cache due to event network-changed-3d031948-681f-43fd-91d7-c88b8b59d70c. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1123.476183] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Acquiring lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.476316] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Acquired lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.476469] env[68279]: DEBUG nova.network.neutron [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Refreshing network info cache for port 3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1123.749466] env[68279]: DEBUG oslo_concurrency.lockutils [None req-75fd389e-70f9-4838-b141-c74be2397d42 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.493s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.756389] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.119s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.756639] env[68279]: INFO nova.compute.manager [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Unshelving [ 1123.767103] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963843, 'name': Rename_Task, 'duration_secs': 0.156145} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.767425] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.767612] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-374f5318-386a-45f3-8e1f-97f058c6a4d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.775246] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1123.775246] env[68279]: value = "task-2963845" [ 1123.775246] env[68279]: _type = "Task" [ 1123.775246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.783630] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.849516] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a43fd5bf-36da-4b93-a38e-0991ef6ca9db tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-f6a65d1b-ba9c-44b7-b9aa-815cabd45176-b892325d-c8dc-4176-b161-d5d789cd4e40" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.803s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.866252] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963844, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.210109] env[68279]: DEBUG nova.network.neutron [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Updated VIF entry in instance network info cache for port 3d031948-681f-43fd-91d7-c88b8b59d70c. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1124.210527] env[68279]: DEBUG nova.network.neutron [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Updating instance_info_cache with network_info: [{"id": "3d031948-681f-43fd-91d7-c88b8b59d70c", "address": "fa:16:3e:1f:62:0b", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d031948-68", "ovs_interfaceid": "3d031948-681f-43fd-91d7-c88b8b59d70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.285119] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963845, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.364798] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963844, 'name': CreateVM_Task, 'duration_secs': 0.678147} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.367472] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1124.368384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.368677] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.368920] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1124.369206] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6efe5c3-d0ea-4b83-8359-a1fce40f3f0c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.374405] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1124.374405] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c220f2-6d1d-81c4-21c6-7147c106607e" [ 1124.374405] env[68279]: _type = "Task" [ 1124.374405] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.385493] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c220f2-6d1d-81c4-21c6-7147c106607e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.426077] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b2b152-cf5a-436d-a7ec-1bae1db29653 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.433737] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0e0722-1dbe-4db5-bdbd-a761ea8afca0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.467089] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df934b97-4125-4de8-a17b-271616234433 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.474644] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f896ba-4e9e-4f0e-b259-31bceda5df8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.489236] env[68279]: DEBUG nova.network.neutron [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.491037] env[68279]: DEBUG nova.compute.provider_tree [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.714118] env[68279]: DEBUG oslo_concurrency.lockutils [req-2e9c0338-acf6-4b95-b648-16830f1db39e req-590a515c-45cb-414d-b790-aea467952bde service nova] Releasing lock "refresh_cache-bfc3f843-3295-4381-8c9f-3bad711603fc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.780493] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.786389] env[68279]: DEBUG oslo_vmware.api [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963845, 'name': PowerOnVM_Task, 'duration_secs': 0.597455} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.786658] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.786857] env[68279]: INFO nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Took 7.67 seconds to spawn the instance on the hypervisor. [ 1124.787067] env[68279]: DEBUG nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.787798] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0497f916-1988-438d-9bd3-1b629777cfac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.885572] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c220f2-6d1d-81c4-21c6-7147c106607e, 'name': SearchDatastore_Task, 'duration_secs': 0.019919} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.885870] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.886120] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1124.886369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.886520] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.886703] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.886954] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3df94a-17ca-4c5a-a22d-0746acb5b1ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.903215] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.903390] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1124.904119] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af201b0f-d64c-47d3-8e2b-af685420d116 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.909537] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1124.909537] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cc1614-ec62-54e3-af16-983e6288206c" [ 1124.909537] env[68279]: _type = "Task" [ 1124.909537] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.916871] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cc1614-ec62-54e3-af16-983e6288206c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.994465] env[68279]: INFO nova.compute.manager [-] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Took 1.52 seconds to deallocate network for instance. 
[ 1124.996028] env[68279]: DEBUG nova.scheduler.client.report [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.307579] env[68279]: INFO nova.compute.manager [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Took 16.71 seconds to build instance. [ 1125.424299] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cc1614-ec62-54e3-af16-983e6288206c, 'name': SearchDatastore_Task, 'duration_secs': 0.011024} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.424299] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cdfd697-3f1a-4a61-885f-14d08388330c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.430032] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1125.430032] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526760ad-417f-da27-774b-b9df3f88b4df" [ 1125.430032] env[68279]: _type = "Task" [ 1125.430032] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.438532] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526760ad-417f-da27-774b-b9df3f88b4df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.505040] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.505040] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1125.510354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.505s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.510747] env[68279]: DEBUG nova.objects.instance [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'resources' on Instance uuid 50f390b2-99b7-49f3-997f-7d7b50cff9f2 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.512385] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.517512] env[68279]: DEBUG nova.compute.manager [req-ae319df5-5cbd-418f-960c-4a002a1b998b req-25ff42e4-c81b-4425-908d-0bdb6e94f505 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Received event network-vif-deleted-7243843a-c48e-44d5-990f-1de0a9191cbd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.809511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-80f89a18-900b-4717-b94e-c8cf8ebae15a tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.217s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.942716] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]526760ad-417f-da27-774b-b9df3f88b4df, 'name': SearchDatastore_Task, 'duration_secs': 0.010627} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.943147] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.943498] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] bfc3f843-3295-4381-8c9f-3bad711603fc/bfc3f843-3295-4381-8c9f-3bad711603fc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1125.943820] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df55d630-a659-44f8-8030-e050d6a52d35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.955776] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1125.955776] env[68279]: value = "task-2963846" [ 1125.955776] env[68279]: _type = "Task" [ 1125.955776] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.965102] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.024677] env[68279]: DEBUG nova.compute.utils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1126.030270] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Not allocating networking since 'none' was specified. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1126.240435] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ce1500-f309-452a-9da0-ae0368013bb8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.250198] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d10c0ad-ec69-4174-b84e-8c2577f320e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.283150] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d85c001-605b-4551-8aa3-6b33faeeca03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.291148] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc014c0-ca62-4bc3-bacc-e2ceb5287ebf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.307123] env[68279]: DEBUG nova.compute.provider_tree [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.466990] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47139} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.467299] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] bfc3f843-3295-4381-8c9f-3bad711603fc/bfc3f843-3295-4381-8c9f-3bad711603fc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1126.467525] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1126.467774] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f69036bb-4487-4010-86e1-fa71eaf7a19a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.475595] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1126.475595] env[68279]: value = "task-2963847" [ 1126.475595] env[68279]: _type = "Task" [ 1126.475595] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.484787] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963847, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.531650] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.810340] env[68279]: DEBUG nova.scheduler.client.report [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.986509] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963847, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06115} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.986663] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.987456] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63763397-e8be-4995-972a-8b1b1b278279 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.015497] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] bfc3f843-3295-4381-8c9f-3bad711603fc/bfc3f843-3295-4381-8c9f-3bad711603fc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.015879] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e988e43-b164-4a98-8e8d-6b499ebb2289 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.033822] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.034095] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.042067] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1127.042067] env[68279]: value = "task-2963848" [ 1127.042067] env[68279]: _type = "Task" [ 1127.042067] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.053992] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963848, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.109240] env[68279]: DEBUG nova.compute.manager [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1127.319612] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.322458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.123s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.322934] env[68279]: INFO nova.compute.claims [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.346586] env[68279]: INFO nova.scheduler.client.report [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocations for instance 50f390b2-99b7-49f3-997f-7d7b50cff9f2 [ 1127.541308] env[68279]: DEBUG nova.compute.utils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1127.547079] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.555698] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963848, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.567367] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.567530] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.567740] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.567868] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.568020] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.568170] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.568377] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.568535] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.568699] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 
tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.568859] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.569044] env[68279]: DEBUG nova.virt.hardware [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.570103] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d573f4d6-2cd7-4f0a-ac9c-14d1032a473d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.578186] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10287b01-14b3-4e58-b6c5-a9526a5fe669 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.591898] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.597352] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Creating folder: Project (e42862b791194f4f94dd969169629712). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1127.597622] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33e3a588-dee6-4c92-929e-4eca3539bf07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.606615] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Created folder: Project (e42862b791194f4f94dd969169629712) in parent group-v594445. [ 1127.606795] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Creating folder: Instances. Parent ref: group-v594740. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1127.607017] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eff6cff1-1258-47c3-9c06-1d9d41092b63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.616994] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Created folder: Instances in parent group-v594740. 
[ 1127.617262] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.617452] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.617653] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd60f094-5486-4928-9414-ff19726bea18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.630998] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.635836] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.635836] env[68279]: value = "task-2963852" [ 1127.635836] env[68279]: _type = "Task" [ 1127.635836] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.643109] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963852, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.854411] env[68279]: DEBUG oslo_concurrency.lockutils [None req-182b6aca-9c65-4ae5-a376-06a926992fc5 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "50f390b2-99b7-49f3-997f-7d7b50cff9f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.819s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.047841] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.059427] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963848, 'name': ReconfigVM_Task, 'duration_secs': 0.815698} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.059834] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Reconfigured VM instance instance-0000006e to attach disk [datastore1] bfc3f843-3295-4381-8c9f-3bad711603fc/bfc3f843-3295-4381-8c9f-3bad711603fc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1128.060595] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5333fcf-021c-4072-8229-335e7186b5e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.069265] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1128.069265] env[68279]: value = "task-2963855" [ 1128.069265] env[68279]: _type = "Task" [ 1128.069265] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.077471] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963855, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.147946] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963852, 'name': CreateVM_Task, 'duration_secs': 0.307001} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.148241] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.148966] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.149381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.149810] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1128.150260] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfd3bac2-4c21-4da9-a570-072b251be3bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.159453] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1128.159453] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c4e68-5cb7-8c54-837a-4944f78d8c64" [ 1128.159453] env[68279]: _type = "Task" [ 1128.159453] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.168546] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c4e68-5cb7-8c54-837a-4944f78d8c64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.526911] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06b9bfd-2ee8-477c-b184-e7a5e6ba7e2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.535148] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360bed06-3eb2-4987-85ef-4a683eb7ead1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.579133] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa3a4c9-dd3b-428f-9ba7-8696ac19899e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.586660] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963855, 'name': Rename_Task, 'duration_secs': 0.150369} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.589535] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1128.590550] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b11ee6e1-42a4-480e-b69c-0a94a40cae90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.592755] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1392d7c1-cecc-46f9-a4c1-a611e84cdfc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.610304] env[68279]: DEBUG nova.compute.provider_tree [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.613087] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1128.613087] env[68279]: value = "task-2963856" [ 1128.613087] env[68279]: _type = "Task" [ 1128.613087] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.621839] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963856, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.666559] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520c4e68-5cb7-8c54-837a-4944f78d8c64, 'name': SearchDatastore_Task, 'duration_secs': 0.013563} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.666861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.667108] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.667412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.667412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.668143] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.668143] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-147724d9-1014-4ce3-ad83-745d04a5055a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.681033] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.681221] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.681968] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-504afb7b-21de-4bbb-874f-9038bcab830f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.687786] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1128.687786] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524dd7ae-059d-5d89-dd8f-91cd97f72f24" [ 1128.687786] env[68279]: _type = "Task" [ 1128.687786] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.697235] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524dd7ae-059d-5d89-dd8f-91cd97f72f24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.102489] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.102755] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.102998] env[68279]: INFO nova.compute.manager [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Attaching volume 9c91badb-14f4-4773-b94b-8be9f58d0d64 to /dev/sdb [ 1129.114310] env[68279]: DEBUG nova.scheduler.client.report [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.131170] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963856, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.140487] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92950812-532e-4a82-a8db-00de19854cb1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.147138] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ae1cc8-bdc6-43ce-80b4-5b5709db9b1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.164682] env[68279]: DEBUG nova.virt.block_device [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating existing volume attachment record: 83451d54-631f-4b58-acda-f9271498d841 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1129.197314] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524dd7ae-059d-5d89-dd8f-91cd97f72f24, 'name': SearchDatastore_Task, 'duration_secs': 0.025771} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.198282] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-919d63b1-e5bd-4ca7-b7a6-be0d779067f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.204012] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1129.204012] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521808d9-07f9-75aa-2dbe-79df3831f536" [ 1129.204012] env[68279]: _type = "Task" [ 1129.204012] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.211512] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521808d9-07f9-75aa-2dbe-79df3831f536, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.553745] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.554043] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.627870] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.628428] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1129.630963] env[68279]: DEBUG oslo_vmware.api [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963856, 'name': PowerOnVM_Task, 'duration_secs': 0.529271} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.631427] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.851s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.631637] env[68279]: DEBUG nova.objects.instance [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'pci_requests' on Instance uuid dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.632582] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.632783] env[68279]: INFO nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Took 8.47 seconds to spawn the instance on the hypervisor. [ 1129.632962] env[68279]: DEBUG nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.634011] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51882aea-52f8-42e5-a4cc-a6f7a84b63f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.712904] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521808d9-07f9-75aa-2dbe-79df3831f536, 'name': SearchDatastore_Task, 'duration_secs': 0.038944} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.713252] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.713516] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7c9c6661-2e52-4dba-8671-26f69d089903/7c9c6661-2e52-4dba-8671-26f69d089903.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.713758] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e1d2344-e068-4b8c-b7e9-72a9574c155b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.719655] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1129.719655] env[68279]: value = "task-2963860" [ 1129.719655] env[68279]: _type = "Task" [ 1129.719655] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.727371] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.057824] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1130.133041] env[68279]: DEBUG nova.compute.utils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1130.134729] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Not allocating networking since 'none' was specified. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1130.136566] env[68279]: DEBUG nova.objects.instance [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'numa_topology' on Instance uuid dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.153418] env[68279]: INFO nova.compute.manager [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Took 18.37 seconds to build instance. [ 1130.236193] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963860, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.582052] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.638625] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1130.641976] env[68279]: INFO nova.compute.claims [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.654184] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3bbd0375-e922-4d03-a1ca-9bf188aa9af7 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.891s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.730200] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635112} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.730474] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 7c9c6661-2e52-4dba-8671-26f69d089903/7c9c6661-2e52-4dba-8671-26f69d089903.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.730686] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.730933] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47573857-b914-4300-ad84-7d71f7eec6d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.736789] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1130.736789] env[68279]: value = "task-2963862" [ 1130.736789] env[68279]: _type = "Task" [ 1130.736789] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.744784] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.247915] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068471} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.248197] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1131.248964] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ae1bd2-fab7-4c72-8996-42a30144f6ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.268696] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 7c9c6661-2e52-4dba-8671-26f69d089903/7c9c6661-2e52-4dba-8671-26f69d089903.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.268970] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f130bef-762b-4388-98b7-e4981b4f8d9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.289391] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1131.289391] env[68279]: value = "task-2963863" [ 1131.289391] env[68279]: _type = "Task" [ 1131.289391] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.297374] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963863, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.485458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "bfc3f843-3295-4381-8c9f-3bad711603fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.485731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.485941] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.486148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.486346] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.488455] env[68279]: INFO nova.compute.manager [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Terminating instance [ 1131.652833] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1131.680846] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1131.681015] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1131.681196] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1131.681378] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1131.681524] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1131.681725] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1131.681877] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1131.682046] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1131.682383] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 
tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1131.682423] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1131.682571] env[68279]: DEBUG nova.virt.hardware [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1131.683532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e164c7-4a1f-4ab9-864e-599b63c1e0f9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.695259] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3d9ed0-5ae5-41b0-933e-620d7dbdcef4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.709409] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.715220] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1131.718526] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.719269] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-084f7ab9-3f29-4eb1-b499-94d10dca0b0e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.739924] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.739924] env[68279]: value = "task-2963865" [ 1131.739924] env[68279]: _type = "Task" [ 1131.739924] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.749837] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963865, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.801669] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963863, 'name': ReconfigVM_Task, 'duration_secs': 0.328736} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.802102] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 7c9c6661-2e52-4dba-8671-26f69d089903/7c9c6661-2e52-4dba-8671-26f69d089903.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.802864] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a469d5e3-fd30-4449-adcf-b8287f37e96b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.812934] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1131.812934] env[68279]: value = "task-2963866" [ 1131.812934] env[68279]: _type = "Task" [ 1131.812934] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.821880] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963866, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.877190] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d23f6b-81c1-43ab-88f8-c95f4c5b985a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.885671] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4c61a7-cbe6-4bb3-a51a-9b587cec61d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.918598] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a999c52-5562-4989-95b5-f101d801b0b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.926963] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187a8ba0-6f4f-438d-9722-63492e4e2712 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.944059] env[68279]: DEBUG nova.compute.provider_tree [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.992285] env[68279]: DEBUG nova.compute.manager [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1131.992523] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1131.993441] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3743793b-b68a-4bd1-a6c6-7b314c80d34e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.001912] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.002212] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e75b7785-93bc-4c1e-beba-5fe834150ea6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.008472] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1132.008472] env[68279]: value = "task-2963867" [ 1132.008472] env[68279]: _type = "Task" [ 1132.008472] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.020239] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.254814] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963865, 'name': CreateVM_Task, 'duration_secs': 0.280911} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.255100] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.255740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.256030] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.256531] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1132.256913] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-604dc595-97b6-4400-b518-93f414bfe693 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.263528] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1132.263528] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b72a1d-acb1-f361-0b53-4f19b5a1999f" [ 1132.263528] env[68279]: _type = "Task" [ 1132.263528] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.275698] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b72a1d-acb1-f361-0b53-4f19b5a1999f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.327065] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963866, 'name': Rename_Task, 'duration_secs': 0.16272} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.327065] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1132.327315] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba6c9d22-59e6-4042-83f9-eeeff5a73cb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.334973] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1132.334973] env[68279]: value = "task-2963868" [ 1132.334973] env[68279]: _type = "Task" [ 1132.334973] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.346150] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.448146] env[68279]: DEBUG nova.scheduler.client.report [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.518580] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963867, 'name': PowerOffVM_Task, 'duration_secs': 0.296096} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.518848] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.519032] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1132.519292] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b825a5f-ed6a-4fb5-9514-feb7c4b8be48 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.580342] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1132.580628] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1132.580806] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore1] bfc3f843-3295-4381-8c9f-3bad711603fc {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1132.581092] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be145304-3c8c-4f42-982e-f2d47356880d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.588147] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1132.588147] env[68279]: value = "task-2963870" [ 1132.588147] env[68279]: _type = "Task" [ 1132.588147] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.596165] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963870, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.773536] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b72a1d-acb1-f361-0b53-4f19b5a1999f, 'name': SearchDatastore_Task, 'duration_secs': 0.012511} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.773869] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.774061] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.774295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.774485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.774694] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.774952] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4be5f984-8f1b-4d72-af55-7723742456c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.783446] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.783617] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.784326] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ed43bd5-245e-4480-9e31-607204682d81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.789387] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1132.789387] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5239268c-3f3f-ed95-55d7-8ead24745419" [ 1132.789387] env[68279]: _type = "Task" [ 1132.789387] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.796898] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5239268c-3f3f-ed95-55d7-8ead24745419, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.845224] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963868, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.953720] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.322s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.955880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.443s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.956131] env[68279]: DEBUG nova.objects.instance [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'resources' on Instance uuid f6a65d1b-ba9c-44b7-b9aa-815cabd45176 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.985626] env[68279]: INFO nova.network.neutron [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating port 343369ce-f2d1-401a-9a78-b72854001a75 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1133.097992] env[68279]: DEBUG oslo_vmware.api [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 
0.254153} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.098267] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1133.098454] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1133.098631] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1133.098800] env[68279]: INFO nova.compute.manager [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1133.099045] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1133.099237] env[68279]: DEBUG nova.compute.manager [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1133.099331] env[68279]: DEBUG nova.network.neutron [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1133.300708] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5239268c-3f3f-ed95-55d7-8ead24745419, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.301488] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7d047f-ca7d-450b-92c9-ea54b3333422 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.307249] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1133.307249] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529ac40a-bdf1-4df4-efde-bd9b68fe20e0" [ 1133.307249] env[68279]: _type = "Task" [ 1133.307249] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.314605] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529ac40a-bdf1-4df4-efde-bd9b68fe20e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.346048] env[68279]: DEBUG oslo_vmware.api [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963868, 'name': PowerOnVM_Task, 'duration_secs': 0.56318} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.347764] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1133.348066] env[68279]: INFO nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Took 5.80 seconds to spawn the instance on the hypervisor. [ 1133.348310] env[68279]: DEBUG nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1133.349342] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff6d665-138c-4d9f-a13d-2b831bbfb6ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.582932] env[68279]: DEBUG nova.compute.manager [req-48714d27-985f-40dc-8894-d5c0bd192b0b req-d523201f-db98-47ed-93f1-3deb59fbb7fe service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Received event network-vif-deleted-3d031948-681f-43fd-91d7-c88b8b59d70c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1133.583337] env[68279]: INFO nova.compute.manager [req-48714d27-985f-40dc-8894-d5c0bd192b0b req-d523201f-db98-47ed-93f1-3deb59fbb7fe service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Neutron deleted interface 3d031948-681f-43fd-91d7-c88b8b59d70c; detaching it from the instance and deleting it from the info cache [ 1133.583337] env[68279]: DEBUG nova.network.neutron [req-48714d27-985f-40dc-8894-d5c0bd192b0b req-d523201f-db98-47ed-93f1-3deb59fbb7fe service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.634812] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48603cbc-d436-4854-8f06-8768eb41a18f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.642795] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c92d06-f8be-4405-9adc-5dbbbd3be017 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.678589] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34707875-8413-49f4-ab37-34b8932ce90b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.686058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07cb6bd-22af-4bf7-9a5c-0b27e2071f5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.706691] env[68279]: DEBUG nova.compute.provider_tree [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.817251] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529ac40a-bdf1-4df4-efde-bd9b68fe20e0, 'name': SearchDatastore_Task, 'duration_secs': 0.039883} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.817528] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.817764] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.818031] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaad6035-cd5f-4c86-8514-5b31ec1cac11 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.825405] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1133.825405] env[68279]: value = "task-2963871" [ 1133.825405] env[68279]: _type = "Task" [ 1133.825405] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.833262] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963871, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.849247] env[68279]: DEBUG nova.network.neutron [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.865472] env[68279]: INFO nova.compute.manager [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Took 16.00 seconds to build instance. [ 1134.086625] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff89de18-a524-4e35-8d89-09d05d5cfaef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.098103] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b059605f-4b6d-400e-8d4d-9860f4603e52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.135267] env[68279]: DEBUG nova.compute.manager [req-48714d27-985f-40dc-8894-d5c0bd192b0b req-d523201f-db98-47ed-93f1-3deb59fbb7fe service nova] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Detach interface failed, port_id=3d031948-681f-43fd-91d7-c88b8b59d70c, reason: Instance bfc3f843-3295-4381-8c9f-3bad711603fc could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1134.209112] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1134.209440] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594747', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'name': 'volume-9c91badb-14f4-4773-b94b-8be9f58d0d64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3da334a-1dfc-41d8-8ba8-aabe53924bdc', 'attached_at': '', 'detached_at': '', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'serial': '9c91badb-14f4-4773-b94b-8be9f58d0d64'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1134.210469] env[68279]: DEBUG nova.scheduler.client.report [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.214732] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fd8c0a-fec9-448a-9bec-8c27149aa053 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.234726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ee171b-a6d2-448e-932f-016deba6e751 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.263490] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-9c91badb-14f4-4773-b94b-8be9f58d0d64/volume-9c91badb-14f4-4773-b94b-8be9f58d0d64.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.263826] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ce2d3c-c98d-4b18-a09a-d9f0c97f68a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.285034] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1134.285034] env[68279]: value = "task-2963872" [ 1134.285034] env[68279]: _type = "Task" [ 1134.285034] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.293819] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963872, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.338106] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963871, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.351854] env[68279]: INFO nova.compute.manager [-] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Took 1.25 seconds to deallocate network for instance. [ 1134.368935] env[68279]: DEBUG oslo_concurrency.lockutils [None req-79e355cc-a1ff-43c7-896a-e30d72bbfad3 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.518s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.398999] env[68279]: DEBUG nova.compute.manager [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1134.399255] env[68279]: DEBUG oslo_concurrency.lockutils [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 service nova] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.399463] env[68279]: DEBUG oslo_concurrency.lockutils [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.399707] env[68279]: DEBUG oslo_concurrency.lockutils [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 service nova] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.399784] env[68279]: DEBUG nova.compute.manager [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] No waiting events found dispatching network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1134.400171] env[68279]: WARNING nova.compute.manager [req-19f3dd50-e92e-4096-a315-7733cf02a25f req-ad2634cd-77ad-4bd4-a20a-b64dcb13c2b6 
service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received unexpected event network-vif-plugged-343369ce-f2d1-401a-9a78-b72854001a75 for instance with vm_state shelved_offloaded and task_state spawning. [ 1134.491096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.491342] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.491538] env[68279]: DEBUG nova.network.neutron [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.719974] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.722442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.091s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.745752] env[68279]: INFO nova.scheduler.client.report [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted allocations for instance f6a65d1b-ba9c-44b7-b9aa-815cabd45176 [ 1134.795746] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963872, 'name': ReconfigVM_Task, 'duration_secs': 0.40417} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.796059] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-9c91badb-14f4-4773-b94b-8be9f58d0d64/volume-9c91badb-14f4-4773-b94b-8be9f58d0d64.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.801303] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d9815a8-efc6-4198-865f-8873da5bcc9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.820214] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1134.820214] env[68279]: value = "task-2963874" [ 1134.820214] env[68279]: _type = "Task" [ 1134.820214] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.825687] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963874, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.835009] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558321} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.835295] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1134.835519] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.835756] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0730eb97-1d54-4ab1-b75a-7c7bd42c4a67 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.841660] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1134.841660] env[68279]: value = "task-2963875" [ 1134.841660] env[68279]: _type = "Task" [ 1134.841660] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.849318] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963875, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.859468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.205631] env[68279]: DEBUG nova.network.neutron [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.227303] env[68279]: INFO nova.compute.claims [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.252971] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6273014a-466b-48e4-ab1c-b5f74fd46986 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.409s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.253825] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] Acquired lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.255013] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be62e0e1-d8a1-4f5b-9840-b891c2bd6954 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.263514] 
env[68279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1135.263669] env[68279]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=68279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1135.264263] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2984da4f-100d-42d7-9063-71dfe41ec677 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.272548] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507438d0-6525-42ce-9f41-999c1da5be7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.305633] env[68279]: ERROR root [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-594719' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-594719' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-594719' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-594719'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-594719' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-594719' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-594719'}\n"]: nova.exception.InstanceNotFound: Instance f6a65d1b-ba9c-44b7-b9aa-815cabd45176 could not be found. [ 1135.305883] env[68279]: DEBUG oslo_concurrency.lockutils [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] Releasing lock "f6a65d1b-ba9c-44b7-b9aa-815cabd45176" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.305996] env[68279]: DEBUG nova.compute.manager [req-a9979058-785e-42c4-8109-07cd70e94530 req-6a63b315-7730-4fb0-91ba-9e554f138eb1 service nova] [instance: f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Detach interface failed, port_id=b892325d-c8dc-4176-b161-d5d789cd4e40, reason: Instance f6a65d1b-ba9c-44b7-b9aa-815cabd45176 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1135.326405] env[68279]: DEBUG oslo_vmware.api [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963874, 'name': ReconfigVM_Task, 'duration_secs': 0.160769} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.326687] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594747', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'name': 'volume-9c91badb-14f4-4773-b94b-8be9f58d0d64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3da334a-1dfc-41d8-8ba8-aabe53924bdc', 'attached_at': '', 'detached_at': '', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'serial': '9c91badb-14f4-4773-b94b-8be9f58d0d64'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1135.352076] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067017} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.352410] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.353178] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59deb3fa-e6af-454d-ac42-a60743faf44d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.372465] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.372948] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c7527d0-4793-435a-946e-36cc1177ff9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.391966] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1135.391966] env[68279]: value = "task-2963876" [ 1135.391966] env[68279]: _type = "Task" [ 1135.391966] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.399629] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963876, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.708740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.733338] env[68279]: INFO nova.compute.resource_tracker [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating resource usage from migration eacc2263-2386-415a-9e86-60a262a67b31 [ 1135.737683] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='543ae38f8938dcc2dfd1f0425e328937',container_format='bare',created_at=2025-03-12T08:51:47Z,direct_url=,disk_format='vmdk',id=4f678577-45a3-48b3-b6bb-321a68ff5e6e,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-816801310-shelved',owner='34fd2747aeac4bcd9dd18075cf4ebd8b',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2025-03-12T08:52:05Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1135.737907] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.738087] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1135.738346] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.738512] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1135.738663] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1135.738865] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1135.739090] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1135.739222] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1135.739401] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1135.739578] env[68279]: DEBUG nova.virt.hardware [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1135.741053] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2deac9b7-a51b-4a44-b281-ee19a4299cf5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.749434] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442d0002-4c5e-437f-8710-4740f9554a45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.763373] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:8f:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '343369ce-f2d1-401a-9a78-b72854001a75', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.771030] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.773815] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.774289] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8915b5c-e0b6-4172-b377-5e05e6209fa5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.795487] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.795487] env[68279]: value = "task-2963877" [ 1135.795487] env[68279]: _type = "Task" [ 1135.795487] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.804226] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963877, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.901911] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963876, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.927752] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a87fd7-e605-4ee6-b2ba-6f859cb2bfd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.936804] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d3ccf5-26a3-4102-b1a0-cc9b46880570 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.977207] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3479ca-0244-4098-9c9a-5ef6414da3b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.982757] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fd7189-4a82-418b-bc78-4b353e045359 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.996600] env[68279]: DEBUG nova.compute.provider_tree [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.306109] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963877, 'name': CreateVM_Task, 'duration_secs': 0.403702} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.306357] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.306963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.307149] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.307520] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.307778] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c7def45-fba0-47dc-8cb6-7494f8bbcdb2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.311847] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1136.311847] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9715a-b758-121a-7371-034e74e0977a" [ 1136.311847] env[68279]: _type = "Task" [ 1136.311847] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.319508] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52c9715a-b758-121a-7371-034e74e0977a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.367526] env[68279]: DEBUG nova.objects.instance [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'flavor' on Instance uuid e3da334a-1dfc-41d8-8ba8-aabe53924bdc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.402697] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963876, 'name': ReconfigVM_Task, 'duration_secs': 0.54142} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.402911] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Reconfigured VM instance instance-00000070 to attach disk [datastore2] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.403561] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0fa5980-6bf0-419d-a867-e31e92f5feaf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.409813] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1136.409813] env[68279]: value = "task-2963878" [ 1136.409813] env[68279]: _type = "Task" [ 1136.409813] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.417751] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963878, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.425512] env[68279]: DEBUG nova.compute.manager [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-changed-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.425701] env[68279]: DEBUG nova.compute.manager [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing instance network info cache due to event network-changed-343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1136.425905] env[68279]: DEBUG oslo_concurrency.lockutils [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] Acquiring lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.426057] env[68279]: DEBUG oslo_concurrency.lockutils [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] Acquired lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.426218] env[68279]: DEBUG nova.network.neutron [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Refreshing network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.500191] env[68279]: DEBUG nova.scheduler.client.report [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.822866] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.822866] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Processing image 4f678577-45a3-48b3-b6bb-321a68ff5e6e {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.822866] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.823035] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.823123] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.823420] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f940b226-51a0-4b79-baab-f0b749f20b9f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.832071] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.832279] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.832991] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a67ff859-eee4-4bb2-bb24-2c0cc7e63d20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.838584] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1136.838584] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5236565f-25c1-f785-feb6-58984534903a" [ 1136.838584] env[68279]: _type = "Task" [ 1136.838584] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.845796] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5236565f-25c1-f785-feb6-58984534903a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.872698] env[68279]: DEBUG oslo_concurrency.lockutils [None req-9e446e13-86a6-40f2-97f7-ea25c801fe01 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.770s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.920489] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963878, 'name': Rename_Task, 'duration_secs': 0.320342} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.920782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.921018] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e901437b-425a-4b82-a3b3-4777e27656c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.926762] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1136.926762] env[68279]: value = "task-2963880" [ 1136.926762] env[68279]: _type = "Task" [ 1136.926762] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.938652] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963880, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.006071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.283s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.006686] env[68279]: INFO nova.compute.manager [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Migrating [ 1137.016391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.435s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.018259] env[68279]: INFO nova.compute.claims [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.260279] env[68279]: DEBUG nova.network.neutron [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updated VIF entry in instance network info cache for port 343369ce-f2d1-401a-9a78-b72854001a75. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.260798] env[68279]: DEBUG nova.network.neutron [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [{"id": "343369ce-f2d1-401a-9a78-b72854001a75", "address": "fa:16:3e:38:8f:8a", "network": {"id": "2860928a-90a2-49bd-8b74-8e93e31827e9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-280786174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fd2747aeac4bcd9dd18075cf4ebd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343369ce-f2", "ovs_interfaceid": "343369ce-f2d1-401a-9a78-b72854001a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.349036] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1137.349309] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Fetch image to [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22/OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1137.349506] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Downloading stream optimized image 4f678577-45a3-48b3-b6bb-321a68ff5e6e to [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22/OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22.vmdk on the data store datastore1 as vApp {{(pid=68279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1137.349680] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Downloading image file data 4f678577-45a3-48b3-b6bb-321a68ff5e6e to the ESX as VM named 'OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22' {{(pid=68279) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1137.423644] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1137.423644] env[68279]: value = "resgroup-9" [ 1137.423644] env[68279]: _type = "ResourcePool" [ 1137.423644] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1137.423644] env[68279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9c864d70-f0cd-4f59-b287-a6b7efc870f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.447493] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963880, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.448894] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease: (returnval){ [ 1137.448894] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520d0ce4-17a6-59f3-136d-30313b2cc2cd" [ 1137.448894] env[68279]: _type = "HttpNfcLease" [ 1137.448894] env[68279]: } obtained for vApp import into resource pool (val){ [ 1137.448894] env[68279]: value = "resgroup-9" [ 1137.448894] env[68279]: _type = "ResourcePool" [ 1137.448894] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1137.449243] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the lease: (returnval){ [ 1137.449243] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520d0ce4-17a6-59f3-136d-30313b2cc2cd" [ 1137.449243] env[68279]: _type = "HttpNfcLease" [ 1137.449243] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1137.454878] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1137.454878] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520d0ce4-17a6-59f3-136d-30313b2cc2cd" [ 1137.454878] env[68279]: _type = "HttpNfcLease" [ 1137.454878] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1137.531958] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.532160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.532420] env[68279]: DEBUG nova.network.neutron [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.629238] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.629472] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.685605] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.685888] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.763379] env[68279]: DEBUG oslo_concurrency.lockutils [req-dd29acc9-0328-42e5-8e45-b629c6817b04 req-d6b57f3a-cdb1-41ae-886d-929d8b031958 service nova] Releasing lock "refresh_cache-dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.947041] env[68279]: DEBUG oslo_vmware.api [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 
tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963880, 'name': PowerOnVM_Task, 'duration_secs': 0.772008} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.947434] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.947551] env[68279]: INFO nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Took 6.30 seconds to spawn the instance on the hypervisor. [ 1137.947704] env[68279]: DEBUG nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.948499] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b982cc-e6e7-45b7-b75a-46c3e03dffe4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.961721] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1137.961721] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520d0ce4-17a6-59f3-136d-30313b2cc2cd" [ 1137.961721] env[68279]: _type = "HttpNfcLease" [ 1137.961721] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1137.962209] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1137.962209] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]520d0ce4-17a6-59f3-136d-30313b2cc2cd" [ 1137.962209] env[68279]: _type = "HttpNfcLease" [ 1137.962209] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1137.962887] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a8c572-3704-4c44-aac1-b820dd9e263f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.969837] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk from lease info. 
{{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1137.970050] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk. {{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1138.037084] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a3d0725-6cd1-439e-aedd-6020f457f9c9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.132264] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1138.189219] env[68279]: INFO nova.compute.manager [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Detaching volume 9c91badb-14f4-4773-b94b-8be9f58d0d64 [ 1138.208712] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19248851-0552-48cb-8681-9da5e94582c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.231948] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31efea1-3f22-4c13-97cd-1c6ba64278eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.238691] env[68279]: INFO nova.virt.block_device [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Attempting to driver detach volume 9c91badb-14f4-4773-b94b-8be9f58d0d64 from mountpoint /dev/sdb [ 1138.238963] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1138.240221] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594747', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'name': 'volume-9c91badb-14f4-4773-b94b-8be9f58d0d64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3da334a-1dfc-41d8-8ba8-aabe53924bdc', 'attached_at': '', 'detached_at': '', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'serial': '9c91badb-14f4-4773-b94b-8be9f58d0d64'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1138.248027] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f8fa70-eef3-460f-94fd-9fae585017b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.303506] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e071488b-a256-4c24-a12c-d32b91c520b1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.306684] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdebe07-2cc5-4a96-b780-5e985a5e3e3a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.319255] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff593d8-0124-4d5f-8e8b-9a303fdf9492 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.325812] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3588d3c-54ef-4add-946c-9bf62bede0f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.358293] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5f8e8a-8297-4b5a-ba6a-e253ba1bc8a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.361342] env[68279]: DEBUG nova.compute.provider_tree [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.366617] env[68279]: DEBUG nova.network.neutron [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.383363] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] The volume has not been displaced from its original location: [datastore2] volume-9c91badb-14f4-4773-b94b-8be9f58d0d64/volume-9c91badb-14f4-4773-b94b-8be9f58d0d64.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1138.388618] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1138.394023] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5300041c-a3e1-4ee3-a29c-e3f85df3a3db {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.416052] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1138.416052] env[68279]: value = "task-2963882" [ 1138.416052] env[68279]: _type = "Task" [ 1138.416052] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.427198] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963882, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.472725] env[68279]: INFO nova.compute.manager [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Took 20.30 seconds to build instance. 
[ 1138.654799] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.869568] env[68279]: DEBUG nova.scheduler.client.report [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.873470] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.930104] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963882, 'name': ReconfigVM_Task, 'duration_secs': 0.264414} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.930458] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1138.937363] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31ee327a-6861-4402-8edf-f25ebe7ffcba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.956395] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1138.956395] env[68279]: value = "task-2963884" [ 1138.956395] env[68279]: _type = "Task" [ 1138.956395] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.967938] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963884, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.974928] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eb853765-4987-4acf-87b3-df0c6b54dc11 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.817s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.095742] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1139.095974] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1139.097029] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f113d3-37cd-4b8f-aed4-8c1436185afe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.104131] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1139.104400] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1139.104680] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-139f2d52-323a-41ee-98cd-5d04f6ad8f31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.341186] env[68279]: DEBUG oslo_vmware.rw_handles [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cef42c-4136-46ea-f990-fbc9947f1612/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1139.341623] env[68279]: INFO nova.virt.vmwareapi.images [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Downloaded image file data 4f678577-45a3-48b3-b6bb-321a68ff5e6e [ 1139.342418] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddc0dd7-bbeb-4a13-bb1a-240b6146815b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.361099] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-037cb808-e192-4afb-a59c-f407af7b5872 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.376304] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.376838] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1139.383079] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.523s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.383079] env[68279]: DEBUG nova.objects.instance [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid bfc3f843-3295-4381-8c9f-3bad711603fc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.386141] env[68279]: INFO nova.virt.vmwareapi.images [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] The imported VM was unregistered [ 1139.388525] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1139.389208] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e {{(pid=68279) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.389268] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64ab47b0-6a8b-49c4-af16-7ceab6bf622c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.399765] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Created directory with path [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.400422] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22/OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22.vmdk to [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk. {{(pid=68279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1139.400422] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7917ce41-c3f4-4344-8980-783033604eaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.407642] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1139.407642] env[68279]: value = "task-2963886" [ 1139.407642] env[68279]: _type = "Task" [ 1139.407642] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.415573] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.466799] env[68279]: DEBUG oslo_vmware.api [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963884, 'name': ReconfigVM_Task, 'duration_secs': 0.161096} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.467102] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594747', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'name': 'volume-9c91badb-14f4-4773-b94b-8be9f58d0d64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3da334a-1dfc-41d8-8ba8-aabe53924bdc', 'attached_at': '', 'detached_at': '', 'volume_id': '9c91badb-14f4-4773-b94b-8be9f58d0d64', 'serial': '9c91badb-14f4-4773-b94b-8be9f58d0d64'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1139.571137] env[68279]: INFO nova.compute.manager [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Rebuilding instance [ 1139.619024] env[68279]: DEBUG nova.compute.manager [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.619181] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9662d832-0c3c-467f-8233-a98b3d860269 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.884525] env[68279]: DEBUG nova.compute.utils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1139.888890] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1139.889101] env[68279]: DEBUG nova.network.neutron [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1139.924665] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.948133] env[68279]: DEBUG nova.policy [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd67d0e35641a4494a5087e0f3abdc767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd1384256d224e80bf6f25b9fd054376', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1140.015736] env[68279]: DEBUG nova.objects.instance [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'flavor' on Instance uuid e3da334a-1dfc-41d8-8ba8-aabe53924bdc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.100849] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995616b2-eac3-4a09-a303-383616fc5e59 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.111063] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc32e1e5-28d5-47b0-9a31-e6faf0c4dafa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.147740] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ba8c68-5581-4560-a150-e1df4b6ed265 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.157367] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3076bfeb-319d-431d-b771-3c73af9e02fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.174210] env[68279]: DEBUG nova.compute.provider_tree [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.314406] env[68279]: DEBUG nova.network.neutron [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Successfully created port: ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1140.392210] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1140.400420] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a551cf1-2c98-4abb-a55a-9173ee01e94f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.417712] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.431450] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.650050] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.650050] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32921d24-e2da-460b-bb18-d90da405f3f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.657193] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1140.657193] env[68279]: value = "task-2963887" [ 1140.657193] env[68279]: _type = "Task" [ 1140.657193] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.665988] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963887, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.679496] env[68279]: DEBUG nova.scheduler.client.report [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.930767] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.930767] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.930767] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8f4dd42-d56b-4b91-82ca-09403b79ff14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.938929] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1140.938929] env[68279]: value = "task-2963889" [ 1140.938929] env[68279]: _type = "Task" [ 1140.938929] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.950928] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.024757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-50b1b15a-f80b-4b4a-807f-596fbc0585ca tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.339s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.165551] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963887, 'name': PowerOffVM_Task, 'duration_secs': 0.193857} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.165794] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.166039] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.166888] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d196696a-993f-482b-8faf-b177fdae3775 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.174016] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.174249] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6792bacd-cd23-4279-9305-8f47f8294d63 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.187359] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.189897] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.535s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.192694] env[68279]: INFO nova.compute.claims [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.198840] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.199500] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.199500] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleting the datastore file [datastore2] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.199500] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-786c6d08-d77f-4696-984a-d751fa71ed9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.206983] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1141.206983] env[68279]: value = "task-2963891" [ 1141.206983] env[68279]: _type = "Task" [ 1141.206983] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.216164] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.217235] env[68279]: INFO nova.scheduler.client.report [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance bfc3f843-3295-4381-8c9f-3bad711603fc [ 1141.406040] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1141.429745] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.436395] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1141.436746] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1141.437007] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1141.437320] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1141.437525] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1141.437687] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1141.437922] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1141.438137] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1141.438388] 
env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1141.438575] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1141.438749] env[68279]: DEBUG nova.virt.hardware [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1141.439748] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bbd9d1-d8ec-4784-b0bd-43a774c2bda2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.455268] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d27d2e7-c34a-4d4d-b53e-3fcaf206c450 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.460006] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963889, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.522665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.523054] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.523317] env[68279]: DEBUG nova.compute.manager [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.524633] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ac5151-623d-4ec9-a490-872df5582e7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.531752] env[68279]: DEBUG nova.compute.manager [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1141.532400] env[68279]: DEBUG nova.objects.instance [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'flavor' on Instance uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.717326] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16224} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.717583] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.717784] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.717967] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.724699] env[68279]: DEBUG oslo_concurrency.lockutils [None req-631f749b-48c1-4e09-b315-c025fba5b649 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "bfc3f843-3295-4381-8c9f-3bad711603fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.239s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.928946] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.931348] env[68279]: DEBUG nova.compute.manager [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Received event network-vif-plugged-ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.931563] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] Acquiring lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.931756] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.931922] env[68279]: DEBUG oslo_concurrency.lockutils [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.932101] env[68279]: DEBUG nova.compute.manager [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] No waiting events found dispatching network-vif-plugged-ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.932268] env[68279]: WARNING nova.compute.manager [req-8b8e0c82-2ff9-424d-98f2-407ea5e69ac0 req-1ee20878-4349-4d3d-b553-d29bfce2a33b service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Received unexpected event network-vif-plugged-ce309dfa-d75b-46b8-a812-c42760e72418 for instance with vm_state building and task_state spawning. [ 1141.953313] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963889, 'name': PowerOffVM_Task, 'duration_secs': 0.987273} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.953598] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.953826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1141.975837] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.976135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.976337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.976522] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.976713] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.979063] env[68279]: INFO nova.compute.manager [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Terminating instance [ 1142.028886] env[68279]: DEBUG nova.network.neutron [None 
req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Successfully updated port: ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.356704] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4531c260-429a-4677-a0e8-081b27e7a5a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.364452] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706b7057-d463-48d1-8e1b-7a22863acd51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.396474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efc49ed-8146-4e33-98f7-e88a7e97e625 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.404412] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aed8849-04b4-4bc4-825f-7922dd0549d5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.423523] env[68279]: DEBUG nova.compute.provider_tree [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.442383] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963886, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.532866} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.442627] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22/OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22.vmdk to [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk. 
[ 1142.442810] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Cleaning up location [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1142.442976] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_2799f013-4c22-4916-9562-94cb9ab3ad22 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.443253] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44a17222-22b2-42ae-8891-7a80ed5c5931 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.450990] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1142.450990] env[68279]: value = "task-2963892" [ 1142.450990] env[68279]: _type = "Task" [ 1142.450990] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.465576] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.465859] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.465971] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.466169] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.466314] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 
tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.466460] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.466672] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.470595] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.470797] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.470963] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.471154] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.480161] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eb08d5f-0d4e-43fd-8174-6e315d7bac45 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.492076] env[68279]: DEBUG nova.compute.manager [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.492299] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.493469] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25120f36-8c34-47eb-9a87-bd485799c21c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.499410] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.501043] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1142.501043] env[68279]: value = "task-2963893" [ 1142.501043] env[68279]: _type = "Task" [ 1142.501043] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.505601] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.506143] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41ffaf10-e92c-475d-8515-8132aa00447c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.511254] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963893, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.512360] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1142.512360] env[68279]: value = "task-2963894" [ 1142.512360] env[68279]: _type = "Task" [ 1142.512360] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.520773] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963894, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.531959] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.531959] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.531959] env[68279]: DEBUG nova.network.neutron [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.539370] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.539658] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65df7559-f9bd-43f7-ac3d-121dce5a93b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.547972] env[68279]: DEBUG oslo_vmware.api [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1142.547972] env[68279]: value = "task-2963895" [ 1142.547972] env[68279]: _type = "Task" [ 1142.547972] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.556672] env[68279]: DEBUG oslo_vmware.api [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963895, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.753883] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.754175] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.754291] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.754468] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.754616] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.754778] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.755012] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.755178] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.755346] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 
tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.755507] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.755670] env[68279]: DEBUG nova.virt.hardware [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.756613] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113c2f87-3441-4dbd-9bd8-d9ae36da9928 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.765180] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33426cae-9909-431a-9c85-0068970a92fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.778288] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.783851] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.784094] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.784308] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cc04c08-5ce9-4390-8f00-982382d0ad84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.801029] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.801029] env[68279]: value = "task-2963896" [ 1142.801029] env[68279]: _type = "Task" [ 1142.801029] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.808341] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963896, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.930448] env[68279]: DEBUG nova.scheduler.client.report [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.963492] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101577} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.963827] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.964041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.964380] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk to [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1142.964694] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7eecd601-d938-487d-8391-b1afb85d18a3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.972402] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1142.972402] env[68279]: value = "task-2963898" [ 1142.972402] env[68279]: _type = "Task" [ 1142.972402] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.981061] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.010251] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963893, 'name': ReconfigVM_Task, 'duration_secs': 0.241533} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.010561] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1143.023187] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963894, 'name': PowerOffVM_Task, 'duration_secs': 0.237542} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.023506] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.023706] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1143.023971] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e526183-9be7-4164-917c-ce81930b5f14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.058054] env[68279]: DEBUG oslo_vmware.api [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963895, 'name': PowerOffVM_Task, 'duration_secs': 0.213708} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.058426] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.058550] env[68279]: DEBUG nova.compute.manager [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1143.059468] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e392439-e7da-42f8-9110-b08e1ee679ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.068659] env[68279]: DEBUG nova.network.neutron [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.087325] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.087546] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.087729] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleting the datastore file [datastore1] e3da334a-1dfc-41d8-8ba8-aabe53924bdc {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.090067] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a073e6d-96be-4c85-82a3-2a53d3cf0186 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.096127] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1143.096127] env[68279]: value = "task-2963900" [ 1143.096127] env[68279]: _type = "Task" [ 1143.096127] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.104172] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.220091] env[68279]: DEBUG nova.network.neutron [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updating instance_info_cache with network_info: [{"id": "ce309dfa-d75b-46b8-a812-c42760e72418", "address": "fa:16:3e:60:97:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce309dfa-d7", "ovs_interfaceid": "ce309dfa-d75b-46b8-a812-c42760e72418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.250390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.250636] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.311864] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963896, 'name': CreateVM_Task, 'duration_secs': 0.280699} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.312084] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.312541] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.312710] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.313092] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1143.313332] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6d69a8e-3d66-4c19-8567-1c27bfebd426 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.320336] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1143.320336] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c3dc3-d025-d335-73c0-dfbabe97b76f" [ 1143.320336] env[68279]: _type = "Task" [ 1143.320336] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.328706] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c3dc3-d025-d335-73c0-dfbabe97b76f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.435770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.436338] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1143.491364] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.519657] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1143.520140] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.520386] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1143.520532] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.520679] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1143.520827] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1143.521106] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1143.521271] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1143.521437] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1143.521601] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1143.521777] env[68279]: DEBUG nova.virt.hardware [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1143.527762] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1143.528171] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b8afe91-172d-45ae-8ac3-c190cee06a2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.551891] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1143.551891] env[68279]: value = "task-2963901" [ 1143.551891] env[68279]: _type = "Task" [ 1143.551891] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.566502] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963901, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.574264] env[68279]: DEBUG oslo_concurrency.lockutils [None req-43b50e6e-de59-4b67-9624-0bfb83e27538 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.608964] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963900, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.723471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.723821] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Instance network_info: |[{"id": "ce309dfa-d75b-46b8-a812-c42760e72418", "address": "fa:16:3e:60:97:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce309dfa-d7", "ovs_interfaceid": "ce309dfa-d75b-46b8-a812-c42760e72418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1143.724692] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:97:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce309dfa-d75b-46b8-a812-c42760e72418', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1143.732904] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.733242] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1143.733493] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8766d64-6598-4e99-b656-d5f8023f0cf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.754086] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1143.758870] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1143.758870] env[68279]: value = "task-2963902" [ 1143.758870] env[68279]: _type = "Task" [ 1143.758870] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.769047] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963902, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.836109] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c3dc3-d025-d335-73c0-dfbabe97b76f, 'name': SearchDatastore_Task, 'duration_secs': 0.064129} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.836530] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.836783] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.837039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.837195] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.837383] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.837882] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18f21d30-52e7-40da-9bcb-6b47edd41b62 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.857621] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.857895] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.859225] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3b50776-ebea-4032-91d0-a79323cd63ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.869273] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1143.869273] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529609f1-b199-e6ab-40b3-c5418d95cf31" [ 1143.869273] env[68279]: _type = "Task" [ 1143.869273] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.880072] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529609f1-b199-e6ab-40b3-c5418d95cf31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.942325] env[68279]: DEBUG nova.compute.utils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1143.944039] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1143.944215] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1143.963133] env[68279]: DEBUG nova.objects.instance [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'flavor' on Instance uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.967694] env[68279]: DEBUG nova.compute.manager [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Received event network-changed-ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.967915] env[68279]: DEBUG nova.compute.manager [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Refreshing instance network info cache due to event network-changed-ce309dfa-d75b-46b8-a812-c42760e72418. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1143.968275] env[68279]: DEBUG oslo_concurrency.lockutils [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] Acquiring lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.968477] env[68279]: DEBUG oslo_concurrency.lockutils [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] Acquired lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.968727] env[68279]: DEBUG nova.network.neutron [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Refreshing network info cache for port ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.992068] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.994148] env[68279]: DEBUG nova.policy [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.064814] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963901, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.109516] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.277119] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963902, 'name': CreateVM_Task, 'duration_secs': 0.490663} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.277379] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1144.278167] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.278341] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.278758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1144.279063] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d31cbef-50ba-4757-b1e6-04b02dff349f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.285865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.286241] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.288203] env[68279]: INFO nova.compute.claims [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.292512] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1144.292512] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521b40f5-334d-6e77-ddf6-9d9c103b9dae" [ 1144.292512] env[68279]: _type = "Task" [ 1144.292512] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.302740] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521b40f5-334d-6e77-ddf6-9d9c103b9dae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.382447] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529609f1-b199-e6ab-40b3-c5418d95cf31, 'name': SearchDatastore_Task, 'duration_secs': 0.086894} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.383383] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2eaaa29-f065-4fa5-9479-62e3c3f8a0dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.387512] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Successfully created port: 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.391742] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1144.391742] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b382a4-47d8-a2a8-4155-12f60587d839" [ 1144.391742] env[68279]: _type = "Task" [ 1144.391742] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.402198] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b382a4-47d8-a2a8-4155-12f60587d839, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.448177] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1144.468991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.469302] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.469414] env[68279]: DEBUG nova.network.neutron [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.469526] env[68279]: DEBUG nova.objects.instance [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'info_cache' on Instance uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.486546] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.564841] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963901, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.609430] env[68279]: DEBUG oslo_vmware.api [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.230814} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.609648] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.609900] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.610166] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.610359] env[68279]: INFO nova.compute.manager [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1144.610602] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.610807] env[68279]: DEBUG nova.compute.manager [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1144.610902] env[68279]: DEBUG nova.network.neutron [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1144.810845] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521b40f5-334d-6e77-ddf6-9d9c103b9dae, 'name': SearchDatastore_Task, 'duration_secs': 0.02427} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.812148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.812486] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1144.812746] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.812896] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.813084] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1144.813678] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-171e6b1d-87e5-4b4b-bf95-b7fffe3682b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.827906] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1144.828335] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1144.829457] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a139dbe1-ff97-495a-a9f0-cf5758f44339 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.839599] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1144.839599] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528408d0-1bfd-2fcd-8892-91eb9a4be50f" [ 1144.839599] env[68279]: _type = "Task" [ 1144.839599] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.853044] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528408d0-1bfd-2fcd-8892-91eb9a4be50f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.905645] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b382a4-47d8-a2a8-4155-12f60587d839, 'name': SearchDatastore_Task, 'duration_secs': 0.089683} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.906036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.906384] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.906717] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58566cd3-bfd3-4274-bf5e-e3190d385011 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.915495] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1144.915495] env[68279]: value = "task-2963903" [ 1144.915495] env[68279]: _type = "Task" [ 1144.915495] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.926490] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.975573] env[68279]: DEBUG nova.objects.base [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Object Instance<84b2828a-e62c-45b2-a5ee-067ca66e626b> lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1144.988359] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.071562] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963901, 'name': ReconfigVM_Task, 'duration_secs': 1.127997} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.071562] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1145.072277] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748c0062-bb07-497c-951a-c8c501731426 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.075765] env[68279]: DEBUG nova.network.neutron [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updated VIF entry in instance network info cache for port ce309dfa-d75b-46b8-a812-c42760e72418. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.076130] env[68279]: DEBUG nova.network.neutron [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updating instance_info_cache with network_info: [{"id": "ce309dfa-d75b-46b8-a812-c42760e72418", "address": "fa:16:3e:60:97:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce309dfa-d7", "ovs_interfaceid": "ce309dfa-d75b-46b8-a812-c42760e72418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.100285] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.101337] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e889138c-47aa-4c86-be60-e33f4285ab30 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.123009] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1145.123009] env[68279]: value = "task-2963905" [ 1145.123009] env[68279]: _type = "Task" [ 1145.123009] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.134697] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963905, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.355913] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]528408d0-1bfd-2fcd-8892-91eb9a4be50f, 'name': SearchDatastore_Task, 'duration_secs': 0.026947} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.360359] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4cac4a-2990-4817-a9ca-5bc73f41b7b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.366859] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1145.366859] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f8fd62-fab6-4d61-e18e-7211e7bb2837" [ 1145.366859] env[68279]: _type = "Task" [ 1145.366859] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.377959] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f8fd62-fab6-4d61-e18e-7211e7bb2837, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.424505] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.458162] env[68279]: DEBUG nova.compute.manager [req-0a7e917c-4799-4198-a482-d99ea215532c req-9d7a4890-8044-4383-8a16-3e372b22ef35 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Received event network-vif-deleted-1665daa6-4f83-44e0-8f73-d3ccc3eddb5f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.458571] env[68279]: INFO nova.compute.manager [req-0a7e917c-4799-4198-a482-d99ea215532c req-9d7a4890-8044-4383-8a16-3e372b22ef35 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Neutron deleted interface 1665daa6-4f83-44e0-8f73-d3ccc3eddb5f; detaching it from the instance and deleting it from the info cache [ 1145.462018] env[68279]: DEBUG nova.network.neutron [req-0a7e917c-4799-4198-a482-d99ea215532c req-9d7a4890-8044-4383-8a16-3e372b22ef35 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.462018] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1145.492621] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963898, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.420329} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.494866] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4f678577-45a3-48b3-b6bb-321a68ff5e6e/4f678577-45a3-48b3-b6bb-321a68ff5e6e.vmdk to [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.497134] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1145.497364] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.497523] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1145.497709] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.497855] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1145.497999] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1145.498217] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1145.498406] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1145.498592] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1145.498757] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1145.498932] env[68279]: DEBUG nova.virt.hardware [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1145.499961] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8259ac39-47e8-4f2a-8b5c-405dd4620236 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.503464] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96ab5ed-5184-4a5d-b20b-f9354b22744d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.521570] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afd95dd-97e2-4800-a2b2-7a437afb734d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.534462] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.538024] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb25dd93-6320-4d9d-8624-91ca3d49a0c6 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.553120] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cd1a72-08a5-4b42-8908-e029ffa1c54d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.575059] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca342fe-495c-41c0-8c83-d0bb9a8be12d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.581596] env[68279]: DEBUG oslo_concurrency.lockutils [req-44fd65f8-e06d-4c79-9217-65f75f5d1def req-20b92c3c-f3ad-4b88-a644-007e36d42e52 service nova] Releasing lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.582336] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1145.582336] env[68279]: value = "task-2963906" [ 1145.582336] env[68279]: _type = "Task" [ 1145.582336] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.617636] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2231c7-3e55-420d-92bc-ebe4d4c5877d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.625387] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.633050] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4a904d-55c6-48c9-95a4-faed75120132 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.641333] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963905, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.654651] env[68279]: DEBUG nova.compute.provider_tree [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.805295] env[68279]: DEBUG nova.network.neutron [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.877935] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f8fd62-fab6-4d61-e18e-7211e7bb2837, 'name': SearchDatastore_Task, 'duration_secs': 0.012917} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.878249] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.878533] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 5a7e2125-3310-4fcb-a281-59b0a2c07f67/5a7e2125-3310-4fcb-a281-59b0a2c07f67.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.878798] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-676184ec-2711-478f-9408-f2d91d33289c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.885034] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1145.885034] env[68279]: value = "task-2963907" [ 1145.885034] env[68279]: _type = "Task" [ 1145.885034] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.895489] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963907, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.925624] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.895188} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.926011] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.926179] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.926439] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cc56b05-a622-42be-90b6-c841658594a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.935113] env[68279]: DEBUG nova.network.neutron [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.936333] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1145.936333] env[68279]: value = "task-2963908" [ 1145.936333] env[68279]: _type = "Task" [ 1145.936333] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.950655] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963908, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.967580] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9b87d84-bbf6-4b9d-bc68-bc13f375b605 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.978256] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1c5981-2e49-4b75-8a54-7935583c1ea2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.016307] env[68279]: DEBUG nova.compute.manager [req-0a7e917c-4799-4198-a482-d99ea215532c req-9d7a4890-8044-4383-8a16-3e372b22ef35 service nova] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Detach interface failed, port_id=1665daa6-4f83-44e0-8f73-d3ccc3eddb5f, reason: Instance e3da334a-1dfc-41d8-8ba8-aabe53924bdc could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1146.066021] env[68279]: DEBUG nova.compute.manager [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-plugged-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.066021] env[68279]: DEBUG oslo_concurrency.lockutils [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.066021] env[68279]: DEBUG oslo_concurrency.lockutils [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.066021] env[68279]: DEBUG oslo_concurrency.lockutils [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.066021] env[68279]: DEBUG nova.compute.manager [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] No waiting events found dispatching network-vif-plugged-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.066021] env[68279]: WARNING nova.compute.manager [req-a47c15e6-d4c0-4ce4-a54f-25dbdcf56751 req-ecf24a98-2c1e-416c-af45-a46432935dac service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received unexpected event network-vif-plugged-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 for instance with vm_state building and task_state spawning. [ 1146.098230] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963906, 'name': ReconfigVM_Task, 'duration_secs': 0.496187} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.098525] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Reconfigured VM instance instance-00000062 to attach disk [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba/dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.099219] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49957a0e-6e14-4576-9842-06b7dbdcf34b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.106043] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1146.106043] env[68279]: value = "task-2963909" [ 1146.106043] env[68279]: _type = "Task" [ 1146.106043] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.115143] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963909, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.134179] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963905, 'name': ReconfigVM_Task, 'duration_secs': 0.760698} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.134473] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780/2d05e318-abef-43b0-9ad3-8c839c372780.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.134801] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1146.164032] env[68279]: DEBUG nova.scheduler.client.report [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.169779] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Successfully updated port: 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.308618] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.396294] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485508} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.396858] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 5a7e2125-3310-4fcb-a281-59b0a2c07f67/5a7e2125-3310-4fcb-a281-59b0a2c07f67.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1146.397147] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1146.397410] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f5f4bcc-ca14-4d57-a164-fe70ae22e7f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.403881] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1146.403881] env[68279]: value = "task-2963910" [ 1146.403881] env[68279]: _type = "Task" [ 1146.403881] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.411755] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963910, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.446157] env[68279]: INFO nova.compute.manager [-] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Took 1.84 seconds to deallocate network for instance. [ 1146.446534] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963908, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087775} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.448450] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.452221] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc55a085-dcc0-4dd0-b5d2-27380ffe150f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.474027] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.475036] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cb3ef6f-1298-4b43-97df-862c6bbc8fef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.493537] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1146.493537] env[68279]: value = "task-2963911" [ 1146.493537] env[68279]: _type = "Task" [ 1146.493537] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.504302] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963911, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.616243] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963909, 'name': Rename_Task, 'duration_secs': 0.258174} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.616524] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1146.616765] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6a1c567-d13e-4294-a0dc-197cd99091a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.622202] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1146.622202] env[68279]: value = "task-2963912" [ 1146.622202] env[68279]: _type = "Task" [ 1146.622202] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.629554] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.646309] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad0d81b-5f1a-49a9-9a51-e20ede5d8755 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.675852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.676038] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.680139] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.680769] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.680967] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1146.682995] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d019951-ceab-4d15-b93c-5f81ae738761 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.701784] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1146.914328] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063815} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.915372] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.915502] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ef5502-fd23-4bc0-858f-2db711eb2d18 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.937749] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5a7e2125-3310-4fcb-a281-59b0a2c07f67/5a7e2125-3310-4fcb-a281-59b0a2c07f67.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.938026] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f1704bd-3f04-407d-93ce-7169aa245010 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.957087] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.957381] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.957606] env[68279]: DEBUG nova.objects.instance [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'resources' on Instance uuid e3da334a-1dfc-41d8-8ba8-aabe53924bdc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.961164] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1146.961164] env[68279]: value = "task-2963913" [ 1146.961164] env[68279]: _type = "Task" [ 1146.961164] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.969413] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963913, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.004487] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963911, 'name': ReconfigVM_Task, 'duration_secs': 0.319653} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.004793] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Reconfigured VM instance instance-00000070 to attach disk [datastore1] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd/d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1147.005520] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93d11de1-72b5-452c-ace3-5a9133553c9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.013160] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1147.013160] env[68279]: value = "task-2963914" [ 1147.013160] env[68279]: _type = "Task" [ 1147.013160] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.021686] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963914, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.132973] env[68279]: DEBUG oslo_vmware.api [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963912, 'name': PowerOnVM_Task, 'duration_secs': 0.488345} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.133312] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1147.185992] env[68279]: DEBUG nova.compute.utils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.187371] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1147.188038] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.237403] env[68279]: DEBUG nova.compute.manager [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1147.238437] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93f5431-2a86-47f7-8099-f84454c6934e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.252507] env[68279]: DEBUG nova.policy [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36b39430ec184c72b8950247fd1added', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37b1b1fd2ea44d83b954e5b90ae9e3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1147.286525] env[68279]: DEBUG nova.network.neutron [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Port 633e0597-c529-4822-bb62-9eb5fe78047c binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1147.313992] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.314321] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-035cf67b-41c7-479d-9032-0baba7eb8726 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.321377] env[68279]: DEBUG oslo_vmware.api [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1147.321377] env[68279]: value = "task-2963915" [ 1147.321377] env[68279]: _type = "Task" [ 1147.321377] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.332327] env[68279]: DEBUG oslo_vmware.api [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963915, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.430060] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.479432] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963913, 'name': ReconfigVM_Task, 'duration_secs': 0.296142} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.479839] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5a7e2125-3310-4fcb-a281-59b0a2c07f67/5a7e2125-3310-4fcb-a281-59b0a2c07f67.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1147.480535] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8f25cb5-0407-4120-8222-2ceb6492fb5a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.487150] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1147.487150] env[68279]: value = "task-2963916" [ 1147.487150] env[68279]: _type = "Task" [ 1147.487150] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.496695] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963916, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.523267] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963914, 'name': Rename_Task, 'duration_secs': 0.218917} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.523638] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.523926] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c3da847-9932-4aa3-820c-37de4679764b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.535140] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1147.535140] env[68279]: value = "task-2963917" [ 1147.535140] env[68279]: _type = "Task" [ 1147.535140] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.549038] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.549705] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Successfully created port: d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.693251] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1147.701364] env[68279]: DEBUG nova.network.neutron [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.717760] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddebd32-da52-4046-9990-cd8bb209cf2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.726531] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa13f492-0b52-4a85-a475-dd55ea65e595 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.763432] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463fb5fc-8bf9-4ccb-9681-7c1558f28c86 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.773708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-daa95f49-9e31-4d99-9fdd-dca1b5851b86 tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.017s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.776611] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abce09aa-e656-4985-9c69-477b77c56f5f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.798150] env[68279]: DEBUG nova.compute.provider_tree [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.831879] env[68279]: DEBUG oslo_vmware.api [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963915, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.998176] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963916, 'name': Rename_Task, 'duration_secs': 0.144658} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.998502] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.999282] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00acfddc-115e-4d3f-9c40-cad2b78df3c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.007023] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1148.007023] env[68279]: value = "task-2963918" [ 1148.007023] env[68279]: _type = "Task" [ 1148.007023] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.016124] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.046379] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963917, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.123496] env[68279]: DEBUG nova.compute.manager [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-changed-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.123720] env[68279]: DEBUG nova.compute.manager [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing instance network info cache due to event network-changed-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1148.123912] env[68279]: DEBUG oslo_concurrency.lockutils [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.168449] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.168697] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.168819] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.168942] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.169142] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.169316] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.169480] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.169642] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1148.169803] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.208511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.208749] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Instance network_info: |[{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1148.209050] env[68279]: DEBUG oslo_concurrency.lockutils [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.209245] env[68279]: DEBUG nova.network.neutron [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing network info cache for port 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.213636] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:28:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'43ccbd4c-94a9-45cd-86db-6ab1445ebaf0', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.217915] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1148.219233] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1148.219233] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-263d04b1-22ce-408c-86b3-c9d0ed2309a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.241668] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.241668] env[68279]: value = "task-2963919" [ 1148.241668] env[68279]: _type = "Task" [ 1148.241668] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.249636] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963919, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.309887] env[68279]: DEBUG nova.scheduler.client.report [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.320121] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.321052] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.321052] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.332139] env[68279]: DEBUG oslo_vmware.api [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963915, 'name': PowerOnVM_Task, 'duration_secs': 0.751338} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.333319] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.333554] env[68279]: DEBUG nova.compute.manager [None req-6228422b-88a3-41a0-b8f8-a69a197e77d0 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.334360] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd967770-df0a-447e-96c4-01ae24025a39 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.519398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "5827dda0-48a4-4779-b6d2-7fbf73837583" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.519627] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.520707] env[68279]: DEBUG oslo_vmware.api [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963918, 'name': PowerOnVM_Task, 'duration_secs': 0.471411} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.521138] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.521331] env[68279]: INFO nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Took 7.12 seconds to spawn the instance on the hypervisor. [ 1148.521504] env[68279]: DEBUG nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.522518] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca7ebb5-976b-41c3-bcd2-f396ae1217f6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.545660] env[68279]: DEBUG oslo_vmware.api [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963917, 'name': PowerOnVM_Task, 'duration_secs': 0.925982} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.546738] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.546738] env[68279]: DEBUG nova.compute.manager [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.546898] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6030ca17-9f45-4f89-ae59-4e3ba352eda3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.673712] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.706419] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1148.734026] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.734262] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.734423] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.734602] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.734751] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.734898] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.735123] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.735286] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.735452] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 
tempest-ServersTestJSON-148746370-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.735612] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.735782] env[68279]: DEBUG nova.virt.hardware [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.736658] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba65b94-1331-4c9c-9c79-e131bcd85ec5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.748730] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e07a08-21f2-4847-be8e-abdf503f37d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.759073] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963919, 'name': CreateVM_Task, 'duration_secs': 0.396436} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.766973] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1148.767416] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.767581] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.767899] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1148.768154] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96242b64-986a-4af7-bbda-d57f61cf7eaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.772525] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1148.772525] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e114b3-e76d-6429-9844-421edb9cf634" [ 1148.772525] env[68279]: _type = "Task" [ 1148.772525] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.780258] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e114b3-e76d-6429-9844-421edb9cf634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.816019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.859s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.818193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.145s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.818451] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.818531] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1148.819402] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81e7a48-e474-43a3-8457-6528d8b5bad5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.830328] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ec3f7d-b939-4bf3-a13c-1dec00ef7278 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.836676] env[68279]: INFO nova.scheduler.client.report [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted allocations for instance e3da334a-1dfc-41d8-8ba8-aabe53924bdc [ 1148.858505] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d43c25-ca50-4e3f-a126-5fcf8e7d0b9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.867905] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8bddc0f-c047-4729-ad5d-f505a2b1c66a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.900408] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179440MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1148.900571] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.900783] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.938621] env[68279]: DEBUG nova.network.neutron [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updated VIF entry in instance network info cache for port 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.938973] env[68279]: DEBUG nova.network.neutron [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.943980] env[68279]: DEBUG nova.compute.manager [req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Received event network-vif-plugged-d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.944209] env[68279]: DEBUG oslo_concurrency.lockutils 
[req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] Acquiring lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.944424] env[68279]: DEBUG oslo_concurrency.lockutils [req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.944574] env[68279]: DEBUG oslo_concurrency.lockutils [req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.944733] env[68279]: DEBUG nova.compute.manager [req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] No waiting events found dispatching network-vif-plugged-d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1148.944913] env[68279]: WARNING nova.compute.manager [req-979374d7-068e-4d4f-88ea-9c43b550a47d req-21ea8291-2dd1-451d-96aa-232710557062 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Received unexpected event network-vif-plugged-d710ceee-3dc2-4c78-ac1a-742331ebdd5a for instance with vm_state building and task_state spawning. [ 1149.021895] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1149.038573] env[68279]: INFO nova.compute.manager [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Took 18.48 seconds to build instance. 
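The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task entries in this part of the log all follow the same wait-and-poll pattern: the driver invokes a vCenter task, then repeatedly reads its state, logging "progress is N%" until the task has "completed successfully" and a duration_secs is reported. The sketch below is only a minimal illustration of that loop, assuming hypothetical invoke_task/read_task_info callables that stand in for the SOAP calls shown above; it is not the oslo.vmware or Nova implementation.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; assumed value, for illustration only

    def wait_for_task(invoke_task, read_task_info):
        """Start a vCenter task and poll it until it reaches a terminal state.

        invoke_task and read_task_info are hypothetical callables: invoke_task
        kicks off the task (e.g. something like Folder.CreateVM_Task above) and
        read_task_info returns a dict with 'key', 'state', 'progress' and, on
        success, 'result'.
        """
        task_ref = invoke_task()
        while True:
            info = read_task_info(task_ref)
            if info["state"] == "success":
                return info.get("result")      # e.g. the reference to the newly created VM
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # same kind of breadcrumb the service log records while waiting
            print(f"Task {info['key']} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)

In the log, this loop corresponds to the paired "Waiting for the task" and "progress is N%" lines, with the final entry carrying the measured 'duration_secs' once the task finishes.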
[ 1149.043671] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Successfully updated port: d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.067057] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.283510] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e114b3-e76d-6429-9844-421edb9cf634, 'name': SearchDatastore_Task, 'duration_secs': 0.032286} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.283827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.284066] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.284313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.284460] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.284644] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.284918] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e5196a9-7b45-4dfb-a72d-bdba14d3c066 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.295322] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.295495] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1149.296222] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-602a0261-256b-47e4-aa1e-9c69ff7a5e16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.301632] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1149.301632] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52026607-b6b2-c495-ee5d-919c99cb8779" [ 1149.301632] env[68279]: _type = "Task" [ 1149.301632] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.308693] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52026607-b6b2-c495-ee5d-919c99cb8779, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.345950] env[68279]: DEBUG oslo_concurrency.lockutils [None req-890b0b68-e5d1-4ac1-a044-1a24eb40838f tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "e3da334a-1dfc-41d8-8ba8-aabe53924bdc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.370s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.369286] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.369473] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.369649] env[68279]: DEBUG nova.network.neutron [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.441679] env[68279]: DEBUG oslo_concurrency.lockutils [req-be3562c6-e3b6-48cc-b491-b350d627fa46 req-841f681a-3488-421b-a114-55d16918cc76 service nova] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.544307] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2619c3a-750a-4532-b5fd-97b6082cebb7 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.987s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.547028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.547028] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.547028] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Building network info cache for instance {{(pid=68279) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.550556] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.657370] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.657746] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.658053] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.658626] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.658626] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.662518] env[68279]: INFO nova.compute.manager [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Terminating instance [ 1149.814778] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52026607-b6b2-c495-ee5d-919c99cb8779, 'name': SearchDatastore_Task, 'duration_secs': 0.030971} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.815739] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50487127-dfb3-43da-a34a-00b4340375f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.821976] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1149.821976] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527481b7-c1ff-332c-1083-06b76286069b" [ 1149.821976] env[68279]: _type = "Task" [ 1149.821976] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.831464] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527481b7-c1ff-332c-1083-06b76286069b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.910083] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Applying migration context for instance 2d05e318-abef-43b0-9ad3-8c839c372780 as it has an incoming, in-progress migration eacc2263-2386-415a-9e86-60a262a67b31. Migration status is post-migrating {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1149.911647] env[68279]: INFO nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating resource usage from migration eacc2263-2386-415a-9e86-60a262a67b31 [ 1149.931649] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.931771] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7e34039c-c51a-4f9c-961c-144f6d8a5130 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 11bbfd41-52bb-410c-b368-1473a309d6a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 7c9c6661-2e52-4dba-8671-26f69d089903 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Migration eacc2263-2386-415a-9e86-60a262a67b31 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 2d05e318-abef-43b0-9ad3-8c839c372780 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 5a7e2125-3310-4fcb-a281-59b0a2c07f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c07d8d3c-2af3-47b7-87cb-980c7dd0204d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1149.933063] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 19f84ce0-5ab0-4749-a94a-3dbae0da8757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1150.090712] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.157582] env[68279]: DEBUG nova.compute.manager [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Received event network-changed-ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1150.159240] env[68279]: DEBUG nova.compute.manager [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Refreshing instance network info cache due to event network-changed-ce309dfa-d75b-46b8-a812-c42760e72418. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1150.159240] env[68279]: DEBUG oslo_concurrency.lockutils [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] Acquiring lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.159240] env[68279]: DEBUG oslo_concurrency.lockutils [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] Acquired lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.159240] env[68279]: DEBUG nova.network.neutron [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Refreshing network info cache for port ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.168179] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "refresh_cache-d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.168369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "refresh_cache-d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.168567] env[68279]: DEBUG nova.network.neutron [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.184296] env[68279]: DEBUG nova.network.neutron [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 
2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.271133] env[68279]: DEBUG nova.network.neutron [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Updating instance_info_cache with network_info: [{"id": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "address": "fa:16:3e:44:3f:31", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd710ceee-3d", "ovs_interfaceid": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.335343] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527481b7-c1ff-332c-1083-06b76286069b, 'name': SearchDatastore_Task, 'duration_secs': 0.012562} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.335637] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.335951] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c07d8d3c-2af3-47b7-87cb-980c7dd0204d/c07d8d3c-2af3-47b7-87cb-980c7dd0204d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1150.336242] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-910264c1-d182-4970-b268-da718335b0ab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.343532] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1150.343532] env[68279]: value = "task-2963920" [ 1150.343532] env[68279]: _type = "Task" [ 1150.343532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.355256] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.436323] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1150.436599] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1150.436750] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1150.620316] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8b8b6e-35bf-4a5a-abee-e591ea3fe192 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.628824] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccafe27-d1e1-4231-936e-e1401efe3433 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.664512] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711f6095-c606-4ddd-9979-60b6cb08566e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.677326] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23e9966-8550-43c5-bc2e-da067f750103 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.692688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.698345] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.701992] env[68279]: DEBUG nova.network.neutron [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.755919] env[68279]: DEBUG nova.network.neutron [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.774206] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.774772] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Instance network_info: |[{"id": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "address": "fa:16:3e:44:3f:31", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd710ceee-3d", "ovs_interfaceid": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1150.775266] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:3f:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd710ceee-3dc2-4c78-ac1a-742331ebdd5a', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.783267] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1150.783904] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1150.784168] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81e06f09-b078-4802-9785-6828ed03e0e6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.811033] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.811033] env[68279]: value = "task-2963922" [ 1150.811033] env[68279]: _type = "Task" [ 1150.811033] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.822254] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963922, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.858287] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963920, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.999901] env[68279]: DEBUG nova.network.neutron [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updated VIF entry in instance network info cache for port ce309dfa-d75b-46b8-a812-c42760e72418. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.000289] env[68279]: DEBUG nova.network.neutron [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updating instance_info_cache with network_info: [{"id": "ce309dfa-d75b-46b8-a812-c42760e72418", "address": "fa:16:3e:60:97:a1", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce309dfa-d7", "ovs_interfaceid": "ce309dfa-d75b-46b8-a812-c42760e72418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.137211] env[68279]: DEBUG nova.compute.manager [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Received event network-changed-d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.137211] env[68279]: DEBUG nova.compute.manager [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Refreshing instance network info cache due to event network-changed-d710ceee-3dc2-4c78-ac1a-742331ebdd5a. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.137211] env[68279]: DEBUG oslo_concurrency.lockutils [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] Acquiring lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.137211] env[68279]: DEBUG oslo_concurrency.lockutils [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] Acquired lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.137211] env[68279]: DEBUG nova.network.neutron [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Refreshing network info cache for port d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.201751] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.227770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aaf8e1f-a13f-4fc7-b2b9-8246b15f771d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.254366] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a0d30e-8b72-457d-ae0b-e3390aa605c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.258468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "refresh_cache-d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.258918] env[68279]: DEBUG nova.compute.manager [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1151.259121] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.260302] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdfe326-feed-4ee9-9a4d-f2a29f3cf295 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.267737] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1151.273933] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.275201] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-461987e1-55b6-44ad-8940-5ddbe7b0382f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.281936] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1151.281936] env[68279]: value = "task-2963923" [ 1151.281936] env[68279]: _type = "Task" [ 1151.281936] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.291313] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.321449] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963922, 'name': CreateVM_Task, 'duration_secs': 0.366277} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.322340] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.323077] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.323245] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.323609] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.324109] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9e39d41-a7e7-4938-9f36-6c051080c73a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.329701] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1151.329701] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527fea85-88dd-8a14-1b1d-bbc05763bf55" [ 1151.329701] env[68279]: _type = "Task" [ 1151.329701] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.337368] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527fea85-88dd-8a14-1b1d-bbc05763bf55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.353024] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600242} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.353217] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c07d8d3c-2af3-47b7-87cb-980c7dd0204d/c07d8d3c-2af3-47b7-87cb-980c7dd0204d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1151.353425] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.353693] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8b92171-57be-4623-92f0-a011e9400f4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.360262] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1151.360262] env[68279]: value = "task-2963924" [ 1151.360262] env[68279]: _type = "Task" [ 1151.360262] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.367614] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963924, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.503438] env[68279]: DEBUG oslo_concurrency.lockutils [req-83bd536e-7921-4bab-8759-3d5e96fd9364 req-14e9b23d-6194-49bb-86c5-5e5e659c62c7 service nova] Releasing lock "refresh_cache-5a7e2125-3310-4fcb-a281-59b0a2c07f67" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.706912] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1151.707141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.806s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.707410] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.641s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.707586] env[68279]: DEBUG nova.objects.instance [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1151.710442] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.776609] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1151.776901] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19fc1e31-b232-4486-9c3e-97cfbdb827e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.786725] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1151.786725] env[68279]: value = "task-2963925" [ 1151.786725] env[68279]: _type = "Task" [ 1151.786725] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.794700] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963923, 'name': PowerOffVM_Task, 'duration_secs': 0.298301} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.795311] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.795481] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.796008] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e71487d-f2ea-4d58-ac25-95394ae80608 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.800057] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963925, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.824321] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.824609] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.830373] env[68279]: DEBUG nova.network.neutron [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Updated VIF entry in instance network info cache for port d710ceee-3dc2-4c78-ac1a-742331ebdd5a. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.830737] env[68279]: DEBUG nova.network.neutron [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Updating instance_info_cache with network_info: [{"id": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "address": "fa:16:3e:44:3f:31", "network": {"id": "11885cec-4297-4fe0-ad08-7e9662995eb7", "bridge": "br-int", "label": "tempest-ServersTestJSON-2009607364-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37b1b1fd2ea44d83b954e5b90ae9e3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd710ceee-3d", "ovs_interfaceid": "d710ceee-3dc2-4c78-ac1a-742331ebdd5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.836345] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.837044] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.837044] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleting the datastore file [datastore1] d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.837483] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fefdb44f-2d97-4b79-bb9d-5e4dd315669a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.844864] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527fea85-88dd-8a14-1b1d-bbc05763bf55, 'name': SearchDatastore_Task, 'duration_secs': 0.040799} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.846064] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.846288] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.846507] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.846653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.846843] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.847185] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1151.847185] env[68279]: value = "task-2963927" [ 1151.847185] env[68279]: _type = "Task" [ 1151.847185] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.847568] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7df17ced-6c07-46e9-9ea2-5bd418022f01 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.859281] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963927, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.865650] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1151.865819] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1151.868875] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c160733d-0d71-434d-baf3-de4c8ff4c191 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.870879] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075766} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.871139] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.872095] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e781cec7-1675-4766-8300-7d374611f256 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.875217] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1151.875217] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b5c8f-3f0b-795d-5c49-84b8932ac2e2" [ 1151.875217] env[68279]: _type = "Task" [ 1151.875217] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.895690] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] c07d8d3c-2af3-47b7-87cb-980c7dd0204d/c07d8d3c-2af3-47b7-87cb-980c7dd0204d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.896452] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e31a9ec0-0371-481f-af9e-c85819586503 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.912729] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b5c8f-3f0b-795d-5c49-84b8932ac2e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.920445] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1151.920445] env[68279]: value = "task-2963928" [ 1151.920445] env[68279]: _type = "Task" [ 1151.920445] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.927866] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963928, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.300819] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963925, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.327727] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1152.335049] env[68279]: DEBUG oslo_concurrency.lockutils [req-2719f25a-f81a-47c8-bbf1-c6493a96143a req-761faf58-9a6e-45a3-9f54-0e00c88f0011 service nova] Releasing lock "refresh_cache-19f84ce0-5ab0-4749-a94a-3dbae0da8757" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.359504] env[68279]: DEBUG oslo_vmware.api [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219861} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.359883] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.359976] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.360167] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.360340] env[68279]: INFO nova.compute.manager [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1152.360630] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.360838] env[68279]: DEBUG nova.compute.manager [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.360936] env[68279]: DEBUG nova.network.neutron [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.377067] env[68279]: DEBUG nova.network.neutron [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.388169] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]524b5c8f-3f0b-795d-5c49-84b8932ac2e2, 'name': SearchDatastore_Task, 'duration_secs': 0.045681} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.389680] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e1bc7d2-3d22-4cdc-98a4-335901aca7ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.395281] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1152.395281] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ef1832-5f9d-fc23-7e76-7262560074a8" [ 1152.395281] env[68279]: _type = "Task" [ 1152.395281] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.403104] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ef1832-5f9d-fc23-7e76-7262560074a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.430267] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.717836] env[68279]: DEBUG oslo_concurrency.lockutils [None req-99f623bb-1dc8-4eb6-959f-db92451051a6 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.719157] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.168s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.722053] env[68279]: INFO nova.compute.claims [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.799327] env[68279]: DEBUG oslo_vmware.api [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963925, 'name': PowerOnVM_Task, 'duration_secs': 0.534782} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.799583] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1152.799765] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ef3ac-65e2-4d5a-bb8f-99bfa104ea3e tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance '2d05e318-abef-43b0-9ad3-8c839c372780' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.851708] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.883373] env[68279]: DEBUG nova.network.neutron [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.906705] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ef1832-5f9d-fc23-7e76-7262560074a8, 'name': SearchDatastore_Task, 'duration_secs': 0.022847} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.906963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.907231] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 19f84ce0-5ab0-4749-a94a-3dbae0da8757/19f84ce0-5ab0-4749-a94a-3dbae0da8757.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1152.907489] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-145fdc32-e80b-47ff-b90e-f3b27df06476 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.915512] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1152.915512] env[68279]: value = "task-2963929" [ 1152.915512] env[68279]: _type = "Task" [ 1152.915512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.923864] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.932230] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963928, 'name': ReconfigVM_Task, 'duration_secs': 0.772009} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.932527] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfigured VM instance instance-00000072 to attach disk [datastore1] c07d8d3c-2af3-47b7-87cb-980c7dd0204d/c07d8d3c-2af3-47b7-87cb-980c7dd0204d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.933226] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0258630-ad88-49b5-9b41-6c70aa4d7d7e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.940114] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1152.940114] env[68279]: value = "task-2963930" [ 1152.940114] env[68279]: _type = "Task" [ 1152.940114] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.950879] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963930, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.386525] env[68279]: INFO nova.compute.manager [-] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Took 1.03 seconds to deallocate network for instance. [ 1153.427266] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963929, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.451451] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963930, 'name': Rename_Task, 'duration_secs': 0.257559} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.451826] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.451996] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2dc8c6a-d06c-485c-a7d9-4a19c2afab51 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.461249] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1153.461249] env[68279]: value = "task-2963932" [ 1153.461249] env[68279]: _type = "Task" [ 1153.461249] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.470504] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963932, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.895406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.932964] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718539} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.933348] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 19f84ce0-5ab0-4749-a94a-3dbae0da8757/19f84ce0-5ab0-4749-a94a-3dbae0da8757.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.933645] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1153.933984] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-232f721f-8ec2-4f0d-a62c-94bc75019221 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.945504] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1153.945504] env[68279]: value = "task-2963933" [ 1153.945504] env[68279]: _type = "Task" [ 1153.945504] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.957196] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.970892] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963932, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.059293] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13ed20c-2b41-48a6-9191-827918738026 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.069288] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14c3777-4a88-4b51-a43f-ebcc4a5873c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.099995] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aac3934-14c8-45c1-8742-1421e4143092 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.107665] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a84b29b-d26e-49d1-aa5e-72fd316f36ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.121507] env[68279]: DEBUG nova.compute.provider_tree [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.455757] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.249969} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.456142] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.456866] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf914023-beda-4923-8497-4e5a5fc2eeec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.478710] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 19f84ce0-5ab0-4749-a94a-3dbae0da8757/19f84ce0-5ab0-4749-a94a-3dbae0da8757.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.482429] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-226e7c27-01a6-4295-a769-8f2de92acef1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.503527] env[68279]: DEBUG oslo_vmware.api [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2963932, 'name': PowerOnVM_Task, 'duration_secs': 0.984752} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.504946] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.505184] env[68279]: INFO nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Took 9.04 seconds to spawn the instance on the hypervisor. [ 1154.505370] env[68279]: DEBUG nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.505834] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1154.505834] env[68279]: value = "task-2963934" [ 1154.505834] env[68279]: _type = "Task" [ 1154.505834] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.506404] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b16efb-018a-4865-b77a-116b97959cd5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.522254] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963934, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.624425] env[68279]: DEBUG nova.scheduler.client.report [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.672891] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.027031] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.029980] env[68279]: INFO nova.compute.manager [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Took 16.39 seconds to build instance. 
[ 1155.065711] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.066059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.066291] env[68279]: DEBUG nova.compute.manager [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Going to confirm migration 5 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1155.131126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.131265] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1155.135580] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.284s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.140430] env[68279]: INFO nova.compute.claims [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.180560] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.180836] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.181133] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.181335] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.181513] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1155.426662] env[68279]: DEBUG nova.compute.manager [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-changed-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1155.426890] env[68279]: DEBUG nova.compute.manager [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing instance network info cache due to event network-changed-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1155.427182] env[68279]: DEBUG oslo_concurrency.lockutils [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.427346] env[68279]: DEBUG oslo_concurrency.lockutils [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.427512] env[68279]: DEBUG nova.network.neutron [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing network info cache for port 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.520173] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963934, 'name': ReconfigVM_Task, 'duration_secs': 0.675314} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.520559] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 19f84ce0-5ab0-4749-a94a-3dbae0da8757/19f84ce0-5ab0-4749-a94a-3dbae0da8757.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.521116] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46270503-bd30-4406-9588-5ca19baed6be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.527500] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1155.527500] env[68279]: value = "task-2963936" [ 1155.527500] env[68279]: _type = "Task" [ 1155.527500] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.532209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b869d909-1648-45e9-9525-b59352facaf1 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.903s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.539576] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963936, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.612386] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.612566] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquired lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.612800] env[68279]: DEBUG nova.network.neutron [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.612986] env[68279]: DEBUG nova.objects.instance [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'info_cache' on Instance uuid 2d05e318-abef-43b0-9ad3-8c839c372780 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.648152] env[68279]: DEBUG nova.compute.utils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1155.649472] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1155.651336] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1155.699229] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] There are 65 instances to clean {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1155.699410] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: bfc3f843-3295-4381-8c9f-3bad711603fc] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.702531] env[68279]: DEBUG nova.policy [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a8904f5cdda4cccbb448a67cb5c8600', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd71c92734eeb4a6ea05e4c1116c16161', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1155.994301] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Successfully created port: 6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1156.039176] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963936, 'name': Rename_Task, 'duration_secs': 0.433254} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.039176] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.039176] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21523deb-7816-475f-86e4-c6271c646b6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.046343] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1156.046343] env[68279]: value = "task-2963937" [ 1156.046343] env[68279]: _type = "Task" [ 1156.046343] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.053366] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963937, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.156626] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1156.166070] env[68279]: DEBUG nova.network.neutron [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updated VIF entry in instance network info cache for port 43ccbd4c-94a9-45cd-86db-6ab1445ebaf0. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1156.166433] env[68279]: DEBUG nova.network.neutron [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.207253] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 778efb81-2562-4d55-ace0-09722d92fa5b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.344712] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fadcafc-8fe2-4c33-9ade-2add873ceac8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.352838] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4030b9-7397-4568-a035-42968fdce57f 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.384932] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962f2e7a-7cad-4872-9878-56ade302aa8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.392118] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87670c45-e440-419a-9a95-16a9b530400b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.405580] env[68279]: DEBUG nova.compute.provider_tree [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.557145] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963937, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.661651] env[68279]: INFO nova.virt.block_device [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Booting with volume 47c73841-df53-45da-9226-b82f51a4c434 at /dev/sda [ 1156.670153] env[68279]: DEBUG oslo_concurrency.lockutils [req-65217c23-8eb6-4f01-9a9d-d257639e8e77 req-5ff123c8-8cdc-4e53-8fd0-c3117c94ae61 service nova] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.701491] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f47035e7-daf5-4055-8e86-bb754c6d9d56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.710966] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6352ef-54a8-4ffc-8077-cc888e5f67f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.722524] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: fddf4cb7-cffb-41bb-9806-b8f69579cfef] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.747023] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2322285d-895b-4078-83df-84e690b6ae7a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.758508] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b8bd5e-e7e2-4568-97bb-c224b7b7d173 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.791676] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2c3c6def-6971-4ab1-b47a-753c74d9c0f3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.798801] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e44ccf9-c7e1-4da1-bf97-88fb1d33f179 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.818732] env[68279]: DEBUG nova.virt.block_device [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating existing volume attachment record: b9f3fd75-a9fa-4d16-8798-a5d9f44d5c71 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1156.838094] env[68279]: DEBUG nova.network.neutron [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [{"id": "633e0597-c529-4822-bb62-9eb5fe78047c", "address": "fa:16:3e:4b:15:38", "network": {"id": "8af729a1-23c5-4040-8ca4-ddd3d8b12103", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-889826244-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd517424aba641e4b867e440ba0ee7ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633e0597-c5", "ovs_interfaceid": "633e0597-c529-4822-bb62-9eb5fe78047c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.909548] env[68279]: DEBUG nova.scheduler.client.report [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.056357] env[68279]: DEBUG oslo_vmware.api [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963937, 'name': PowerOnVM_Task, 'duration_secs': 0.841123} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.056629] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.056869] env[68279]: INFO nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Took 8.35 seconds to spawn the instance on the hypervisor. [ 1157.057096] env[68279]: DEBUG nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.058197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2d8767-e005-41dd-bd8c-6cac40fbf6a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.225682] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 296358b1-e978-409c-8113-587ae8f806c7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.341475] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Releasing lock "refresh_cache-2d05e318-abef-43b0-9ad3-8c839c372780" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.341761] env[68279]: DEBUG nova.objects.instance [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lazy-loading 'migration_context' on Instance uuid 2d05e318-abef-43b0-9ad3-8c839c372780 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.414753] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.415352] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.418268] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.523s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.418639] env[68279]: DEBUG nova.objects.instance [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lazy-loading 'resources' on Instance uuid d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.576895] env[68279]: INFO nova.compute.manager [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Took 13.32 seconds to build instance. [ 1157.584215] env[68279]: DEBUG nova.compute.manager [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Received event network-vif-plugged-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.584626] env[68279]: DEBUG oslo_concurrency.lockutils [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] Acquiring lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.584723] env[68279]: DEBUG oslo_concurrency.lockutils [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.584981] env[68279]: DEBUG oslo_concurrency.lockutils [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.585074] env[68279]: DEBUG nova.compute.manager [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] No waiting events found dispatching network-vif-plugged-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1157.585210] env[68279]: WARNING nova.compute.manager [req-ab523366-bc69-4252-8620-c98dfcca9a08 req-f0f15411-fff9-46db-a35f-cb54c4a873e9 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Received unexpected event network-vif-plugged-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 for instance with vm_state building and task_state block_device_mapping. 
[ 1157.604941] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Successfully updated port: 6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1157.729312] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 66b2ce98-9a8a-4344-bd7d-80b7fa001344] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.845056] env[68279]: DEBUG nova.objects.base [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Object Instance<2d05e318-abef-43b0-9ad3-8c839c372780> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1157.845753] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef769dd-cf83-4910-9ef3-d42ec3127f87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.866749] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808a63a0-f2a1-4f22-b82a-774c2e8fd56a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.873847] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1157.873847] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bb8c3e-7ffe-e434-3278-717b96e93389" [ 1157.873847] env[68279]: _type = "Task" [ 1157.873847] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.881497] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bb8c3e-7ffe-e434-3278-717b96e93389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.922086] env[68279]: DEBUG nova.compute.utils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.923367] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1157.923536] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1157.963647] env[68279]: DEBUG nova.policy [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4815a67fd1c410f82905f7ebe2a4c9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e02764848813428dbe0f88c32ad935ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.080919] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4c028f57-5626-4700-92e0-831077f2afe0 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.830s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.107452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.107597] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquired lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.107750] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.111380] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe772d29-df52-4c76-b388-f99d55a78391 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.119522] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa44784-3418-4448-8df2-8e950b596ec1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.149613] env[68279]: DEBUG oslo_concurrency.lockutils [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] 
Acquiring lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.149856] env[68279]: DEBUG oslo_concurrency.lockutils [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.150037] env[68279]: DEBUG nova.compute.manager [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.151185] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35759fb0-d716-4826-b25b-d6c8119fd6f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.154058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5a5021-db34-4340-8d5a-b207409670d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.159677] env[68279]: DEBUG nova.compute.manager [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1158.160268] env[68279]: DEBUG nova.objects.instance [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'flavor' on Instance uuid 19f84ce0-5ab0-4749-a94a-3dbae0da8757 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.164274] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9bd620-779c-4eff-a7a8-bd5b5acece06 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.177250] env[68279]: DEBUG nova.compute.provider_tree [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.233042] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 6b4b59d2-e1b0-46b8-a0a1-948c4587f0b5] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.249163] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Successfully created port: 
abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1158.383823] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52bb8c3e-7ffe-e434-3278-717b96e93389, 'name': SearchDatastore_Task, 'duration_secs': 0.022135} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.384208] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.431021] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.638800] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1158.680263] env[68279]: DEBUG nova.scheduler.client.report [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.736432] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e3da334a-1dfc-41d8-8ba8-aabe53924bdc] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.777338] env[68279]: DEBUG nova.network.neutron [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating instance_info_cache with network_info: [{"id": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "address": "fa:16:3e:3f:b4:50", "network": {"id": "4db621f3-3723-433f-abd0-ea52181c429b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-44941193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d71c92734eeb4a6ea05e4c1116c16161", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a8f6f79-21", "ovs_interfaceid": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.913374] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1158.913992] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.914258] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.914449] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.914662] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.914839] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.915066] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 
tempest-ServerActionsV293TestJSON-2083657082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.915365] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.915545] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1158.915723] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.915892] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.916085] env[68279]: DEBUG nova.virt.hardware [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.917008] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47582210-89f5-4952-ad23-3b563d5b6c41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.925939] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e0970e-ce72-4dbe-9fc0-f72ccdf3725a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.169035] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.169222] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a11ad34-7402-432d-8cec-3785908a04b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.177363] env[68279]: DEBUG oslo_vmware.api [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1159.177363] env[68279]: value = "task-2963939" [ 1159.177363] env[68279]: _type = "Task" [ 1159.177363] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.185361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.187266] env[68279]: DEBUG oslo_vmware.api [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963939, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.187709] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.804s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.211053] env[68279]: INFO nova.scheduler.client.report [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleted allocations for instance d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd [ 1159.239989] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 525e4894-a8b1-45ae-a846-84ded8d97584] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.279582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Releasing lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.279891] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance network_info: |[{"id": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "address": "fa:16:3e:3f:b4:50", "network": {"id": "4db621f3-3723-433f-abd0-ea52181c429b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-44941193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d71c92734eeb4a6ea05e4c1116c16161", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a8f6f79-21", "ovs_interfaceid": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1159.280335] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:b4:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ccbc7a-cf8d-4ea2-8411-291a1e27df7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a8f6f79-2136-4be2-a2a5-cb6b32370b13', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.288197] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Creating folder: Project (d71c92734eeb4a6ea05e4c1116c16161). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.288742] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3761d36-e54f-4030-971f-e5878b2ef5bd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.302968] env[68279]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1159.303142] env[68279]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68279) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1159.303470] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Folder already exists: Project (d71c92734eeb4a6ea05e4c1116c16161). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1159.303649] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Creating folder: Instances. Parent ref: group-v594742. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.304280] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9cc7ecc-e1d0-4c28-a930-5aa781f0a295 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.314822] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Created folder: Instances in parent group-v594742. [ 1159.315215] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.315313] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.315536] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7eea449a-27c8-43a3-82c0-8a6e4a7cfdd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.336205] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.336205] env[68279]: value = "task-2963942" [ 1159.336205] env[68279]: _type = "Task" [ 1159.336205] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.344285] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963942, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.445524] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.475974] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.476341] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.476639] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.476921] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1159.477042] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.477263] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.477502] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.477662] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.477830] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.477991] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.478177] env[68279]: DEBUG nova.virt.hardware [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.479159] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6e84e5-4c2b-4a5b-ac41-1bb30a5d451e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.487054] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5479008-093b-4eca-bfe2-f6759bb04f7f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.616885] env[68279]: DEBUG nova.compute.manager [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Received event network-changed-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.617176] env[68279]: DEBUG nova.compute.manager [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] [instance: 
5827dda0-48a4-4779-b6d2-7fbf73837583] Refreshing instance network info cache due to event network-changed-6a8f6f79-2136-4be2-a2a5-cb6b32370b13. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1159.617385] env[68279]: DEBUG oslo_concurrency.lockutils [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] Acquiring lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.617566] env[68279]: DEBUG oslo_concurrency.lockutils [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] Acquired lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.617782] env[68279]: DEBUG nova.network.neutron [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Refreshing network info cache for port 6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.688415] env[68279]: DEBUG oslo_vmware.api [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963939, 'name': PowerOffVM_Task, 'duration_secs': 0.222459} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.688782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.689064] env[68279]: DEBUG nova.compute.manager [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1159.689941] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea0efe5-9030-477e-9aed-7829cf67d78a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.719238] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Successfully updated port: abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1159.720338] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eca9d6f3-f788-4bbb-9281-fb13c5da55df tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.062s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.743803] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 
f6a65d1b-ba9c-44b7-b9aa-815cabd45176] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.831780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.832033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.853439] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963942, 'name': CreateVM_Task, 'duration_secs': 0.350779} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.853439] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1159.854185] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594751', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'name': 'volume-47c73841-df53-45da-9226-b82f51a4c434', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5827dda0-48a4-4779-b6d2-7fbf73837583', 'attached_at': '', 'detached_at': '', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'serial': '47c73841-df53-45da-9226-b82f51a4c434'}, 'attachment_id': 'b9f3fd75-a9fa-4d16-8798-a5d9f44d5c71', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=68279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1159.854478] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Root volume attach. 
Driver type: vmdk {{(pid=68279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1159.857815] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c48b3f-5624-47b3-b1b6-d2744e860fdb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.868039] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a8e2d1-8ad0-4783-a2ef-4ed94e56699e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.880222] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb2f64a-cf0c-496b-a748-933e9361ad41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.890751] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b995d279-d185-45be-b286-0a3262c44e7d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.899165] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1159.899165] env[68279]: value = "task-2963944" [ 1159.899165] env[68279]: _type = "Task" [ 1159.899165] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.904517] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c2910f-f705-4381-8388-9a164777a3d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.919079] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126cd7e2-c3c6-4154-a47f-974f3f237924 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.922366] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.957699] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3491febd-ef4b-4641-b5e3-68013ea845ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.966287] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55faa47b-cff4-4194-b60e-f6a64b2f66bc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.982566] env[68279]: DEBUG nova.compute.provider_tree [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.204411] env[68279]: DEBUG oslo_concurrency.lockutils [None req-10bfd81a-1305-4f5d-b921-805b5838776b tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.222809] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.222809] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.222809] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1160.250489] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 0b85c3a6-f413-49b1-9936-222117368995] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.335311] env[68279]: DEBUG nova.compute.utils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1160.348841] env[68279]: DEBUG nova.network.neutron [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updated VIF entry in instance network info cache for port 6a8f6f79-2136-4be2-a2a5-cb6b32370b13. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.349228] env[68279]: DEBUG nova.network.neutron [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating instance_info_cache with network_info: [{"id": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "address": "fa:16:3e:3f:b4:50", "network": {"id": "4db621f3-3723-433f-abd0-ea52181c429b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-44941193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d71c92734eeb4a6ea05e4c1116c16161", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a8f6f79-21", "ovs_interfaceid": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.412151] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 42%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.487209] env[68279]: DEBUG nova.scheduler.client.report [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.753385] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 594af7a0-1d0a-43ca-947a-8c5614a289d9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.757509] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1160.838020] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.851874] env[68279]: DEBUG oslo_concurrency.lockutils [req-04b07ffc-f988-4701-8209-3f618a56fbc5 req-c6583f3d-58df-4bfa-9e33-6af7ab289f0d service nova] Releasing lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.900787] env[68279]: DEBUG nova.network.neutron [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updating instance_info_cache with network_info: [{"id": "abfe62ee-1d24-42d6-9928-6425596e2a97", "address": "fa:16:3e:40:27:65", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabfe62ee-1d", "ovs_interfaceid": "abfe62ee-1d24-42d6-9928-6425596e2a97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.914625] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 56%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.257539] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f38a489d-ddcb-4a66-bb60-058d46ed69db] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.406579] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.406895] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Instance network_info: |[{"id": "abfe62ee-1d24-42d6-9928-6425596e2a97", "address": "fa:16:3e:40:27:65", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabfe62ee-1d", "ovs_interfaceid": "abfe62ee-1d24-42d6-9928-6425596e2a97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1161.407318] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:27:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd19577c9-1b2e-490b-8031-2f278dd3f570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abfe62ee-1d24-42d6-9928-6425596e2a97', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1161.415615] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.417725] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1161.420384] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a6a6b1b-65ce-49b9-984c-777e2a363445 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.440045] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 73%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.446845] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1161.446845] env[68279]: value = "task-2963945" [ 1161.446845] env[68279]: _type = "Task" [ 1161.446845] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.455766] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963945, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.499128] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.311s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.762014] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f4963730-d516-48b7-a320-8af731831a30] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.869198] env[68279]: DEBUG nova.compute.manager [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Received event network-vif-plugged-abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.869429] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.869679] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] Lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.869871] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] 
Lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.870026] env[68279]: DEBUG nova.compute.manager [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] No waiting events found dispatching network-vif-plugged-abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.870200] env[68279]: WARNING nova.compute.manager [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Received unexpected event network-vif-plugged-abfe62ee-1d24-42d6-9928-6425596e2a97 for instance with vm_state building and task_state spawning. [ 1161.870409] env[68279]: DEBUG nova.compute.manager [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Received event network-changed-abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.870654] env[68279]: DEBUG nova.compute.manager [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Refreshing instance network info cache due to event network-changed-abfe62ee-1d24-42d6-9928-6425596e2a97. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.870877] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] Acquiring lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.871054] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] Acquired lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.871187] env[68279]: DEBUG nova.network.neutron [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Refreshing network info cache for port abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.913512] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 86%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.936882] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.937162] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.937409] env[68279]: INFO nova.compute.manager [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attaching volume 40f025bf-52f1-4b53-8f40-cf05a0b194d2 to /dev/sdb [ 1161.962909] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963945, 'name': CreateVM_Task} progress is 25%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.981882] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4d3151-f004-47f7-bc44-71482b0946d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.989572] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e608ed-6d62-4162-8c4a-518f27de5692 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.007329] env[68279]: DEBUG nova.virt.block_device [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating existing volume attachment record: 66a5e82c-d1df-4584-8a0c-cf96d558a86f {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1162.069287] env[68279]: INFO nova.scheduler.client.report [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocation for migration eacc2263-2386-415a-9e86-60a262a67b31 [ 1162.265378] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f03dc0f0-3c02-4a4e-a1d5-a2f84a56481b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.415788] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.440474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "7c9c6661-2e52-4dba-8671-26f69d089903" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.440793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.441080] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "7c9c6661-2e52-4dba-8671-26f69d089903-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.441332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.441592] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.443706] env[68279]: INFO nova.compute.manager [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Terminating instance [ 1162.457737] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963945, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.575166] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.509s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.590709] env[68279]: DEBUG nova.network.neutron [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updated VIF entry in instance network info cache for port abfe62ee-1d24-42d6-9928-6425596e2a97. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.591139] env[68279]: DEBUG nova.network.neutron [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updating instance_info_cache with network_info: [{"id": "abfe62ee-1d24-42d6-9928-6425596e2a97", "address": "fa:16:3e:40:27:65", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabfe62ee-1d", "ovs_interfaceid": "abfe62ee-1d24-42d6-9928-6425596e2a97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.768769] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: a96ea5b4-39c5-4a24-873f-54480f876fbf] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.793369] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.793632] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.793867] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.794041] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.794223] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.797446] env[68279]: INFO nova.compute.manager [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Terminating instance [ 1162.913435] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task} progress is 98%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.948032] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "refresh_cache-7c9c6661-2e52-4dba-8671-26f69d089903" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.948196] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquired lock "refresh_cache-7c9c6661-2e52-4dba-8671-26f69d089903" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.948358] env[68279]: DEBUG nova.network.neutron [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.962049] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963945, 'name': CreateVM_Task, 'duration_secs': 1.226985} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.963195] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1162.964202] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.964536] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.965031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1162.965758] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733435cf-7c80-47a5-ae91-f1509f04e843 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.972086] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1162.972086] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5250b7ee-fc3a-2fed-2391-9cb7e54c491d" [ 1162.972086] env[68279]: _type = "Task" [ 1162.972086] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.983246] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5250b7ee-fc3a-2fed-2391-9cb7e54c491d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.093949] env[68279]: DEBUG oslo_concurrency.lockutils [req-87b52ea6-aee3-4e70-b91d-fa26a8c0bd38 req-300e161d-008f-48a3-bbda-c68f83436e7a service nova] Releasing lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.273119] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e1b12b1c-5755-41eb-b550-88c573a09877] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.301237] env[68279]: DEBUG nova.compute.manager [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1163.301456] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.302325] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2a8067-2466-444a-96f7-163e0d048afb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.309951] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.310198] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebb740dc-45b5-4a0d-8bf3-6cc7c4107fd9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.414411] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963944, 'name': RelocateVM_Task, 'duration_secs': 3.125012} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.414679] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1163.414880] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594751', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'name': 'volume-47c73841-df53-45da-9226-b82f51a4c434', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5827dda0-48a4-4779-b6d2-7fbf73837583', 'attached_at': '', 'detached_at': '', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'serial': '47c73841-df53-45da-9226-b82f51a4c434'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1163.415722] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8209f2-aa57-4df0-9dd4-c09b612ed84d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.431611] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3435a1e7-c4bc-40bb-afff-e875f24ec14f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.453282] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] volume-47c73841-df53-45da-9226-b82f51a4c434/volume-47c73841-df53-45da-9226-b82f51a4c434.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.453483] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab61645d-e5b3-473d-96bd-dbb1ce2b13f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.476772] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1163.476772] env[68279]: value = "task-2963949" [ 1163.476772] env[68279]: _type = "Task" [ 1163.476772] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.483709] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5250b7ee-fc3a-2fed-2391-9cb7e54c491d, 'name': SearchDatastore_Task, 'duration_secs': 0.038824} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.483942] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.484194] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.484554] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.484554] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.484722] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.484963] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc7a4edc-d9ef-429c-adaa-915f87f55c0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.489687] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963949, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.490481] env[68279]: DEBUG nova.network.neutron [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1163.499567] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.499729] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1163.500475] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07ccf90a-79b0-4585-9247-f0b1527f1e28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.505855] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1163.505855] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a815b2-c129-a783-fad6-878833b80c8c" [ 1163.505855] env[68279]: _type = "Task" [ 1163.505855] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.515652] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a815b2-c129-a783-fad6-878833b80c8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.546447] env[68279]: DEBUG nova.network.neutron [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.645282] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.645532] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.645762] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore1] 19f84ce0-5ab0-4749-a94a-3dbae0da8757 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.646096] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebf45821-cabb-4353-8c5d-be55d6acc13b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.653635] env[68279]: DEBUG oslo_vmware.api [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1163.653635] env[68279]: value = "task-2963951" [ 1163.653635] env[68279]: _type = "Task" [ 1163.653635] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.663471] env[68279]: DEBUG oslo_vmware.api [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963951, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.776701] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 50f390b2-99b7-49f3-997f-7d7b50cff9f2] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.987023] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963949, 'name': ReconfigVM_Task, 'duration_secs': 0.293434} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.987272] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Reconfigured VM instance instance-00000074 to attach disk [datastore2] volume-47c73841-df53-45da-9226-b82f51a4c434/volume-47c73841-df53-45da-9226-b82f51a4c434.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.992201] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3c28b1c-1c21-4696-a434-f4da1a496b14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.006870] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1164.006870] env[68279]: value = "task-2963952" [ 1164.006870] env[68279]: _type = "Task" [ 1164.006870] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.017171] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963952, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.020387] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a815b2-c129-a783-fad6-878833b80c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.021136] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-344bb969-e04f-40bf-ae30-07e5e83094e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.025451] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1164.025451] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5227c299-92b3-4955-526a-06b55ccc16d3" [ 1164.025451] env[68279]: _type = "Task" [ 1164.025451] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.032950] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5227c299-92b3-4955-526a-06b55ccc16d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.049633] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Releasing lock "refresh_cache-7c9c6661-2e52-4dba-8671-26f69d089903" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.050047] env[68279]: DEBUG nova.compute.manager [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1164.050263] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.051081] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443263a1-19b0-47c9-9238-b5f14a643e14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.057591] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.058132] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6ea21c4-d32b-4e90-a0e3-786c7cc57106 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.064327] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1164.064327] env[68279]: value = "task-2963953" [ 1164.064327] env[68279]: _type = "Task" [ 1164.064327] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.071938] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963953, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.095567] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.095802] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.096017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.096231] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.096413] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.098720] env[68279]: INFO nova.compute.manager [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Terminating instance [ 1164.163495] env[68279]: DEBUG oslo_vmware.api [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309596} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.163824] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.163989] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.164218] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.164412] env[68279]: INFO nova.compute.manager [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Took 0.86 seconds to destroy the instance on the hypervisor. [ 1164.164683] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.164917] env[68279]: DEBUG nova.compute.manager [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1164.165066] env[68279]: DEBUG nova.network.neutron [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1164.280895] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: efda54fe-09a3-4653-b16a-8b3cdd4849c5] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.439679] env[68279]: DEBUG nova.compute.manager [req-6791ba2f-07a8-423a-9a4d-0c559df93b1d req-7542836c-6731-4632-8aa7-a904e5a5fec8 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Received event network-vif-deleted-d710ceee-3dc2-4c78-ac1a-742331ebdd5a {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.439884] env[68279]: INFO nova.compute.manager [req-6791ba2f-07a8-423a-9a4d-0c559df93b1d req-7542836c-6731-4632-8aa7-a904e5a5fec8 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Neutron deleted interface d710ceee-3dc2-4c78-ac1a-742331ebdd5a; detaching it from the instance and deleting it from the info cache [ 1164.440067] env[68279]: DEBUG nova.network.neutron [req-6791ba2f-07a8-423a-9a4d-0c559df93b1d req-7542836c-6731-4632-8aa7-a904e5a5fec8 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Updating instance_info_cache with network_info: [] {{(pid=68279) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.519224] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963952, 'name': ReconfigVM_Task, 'duration_secs': 0.140299} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.519516] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594751', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'name': 'volume-47c73841-df53-45da-9226-b82f51a4c434', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5827dda0-48a4-4779-b6d2-7fbf73837583', 'attached_at': '', 'detached_at': '', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'serial': '47c73841-df53-45da-9226-b82f51a4c434'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1164.520166] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1afd64a-13d2-44d1-adef-f337b9ccba0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.525865] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1164.525865] env[68279]: value = "task-2963955" [ 1164.525865] env[68279]: _type = "Task" [ 1164.525865] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.536720] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5227c299-92b3-4955-526a-06b55ccc16d3, 'name': SearchDatastore_Task, 'duration_secs': 0.013351} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.539779] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.539994] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 79905489-388d-4540-bdff-1c6a02f8bebd/79905489-388d-4540-bdff-1c6a02f8bebd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1164.540264] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963955, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.540468] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46a5f3c4-1654-4da0-a2da-32860cd16453 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.546988] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1164.546988] env[68279]: value = "task-2963956" [ 1164.546988] env[68279]: _type = "Task" [ 1164.546988] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.557144] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963956, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.574317] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963953, 'name': PowerOffVM_Task, 'duration_secs': 0.114473} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.575063] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.575063] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.575237] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dcde923e-54c5-4719-9494-ff2683d39c19 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.599208] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1164.599441] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1164.599629] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleting the datastore file [datastore2] 7c9c6661-2e52-4dba-8671-26f69d089903 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1164.599892] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf51e765-765f-47da-8e72-feb6973a657a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.602523] env[68279]: DEBUG nova.compute.manager [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1164.602834] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.603803] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f18825-fa02-4ee3-a2a3-ec5471a41cbb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.611219] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.612527] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b32a35fd-3dcb-4b23-b8b0-4a388352e28c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.614260] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for the task: (returnval){ [ 1164.614260] env[68279]: value = "task-2963958" [ 1164.614260] env[68279]: _type = "Task" [ 1164.614260] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.618725] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1164.618725] env[68279]: value = "task-2963959" [ 1164.618725] env[68279]: _type = "Task" [ 1164.618725] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.624764] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.629688] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.784998] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 4090e245-b026-4d3a-b7f0-e61543701d8f] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.921027] env[68279]: DEBUG nova.network.neutron [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.942976] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4dab0db-a666-4992-9caf-699ff352ff2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.956394] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd4b506-98a7-4214-a951-af63536a84e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.998039] env[68279]: DEBUG nova.compute.manager [req-6791ba2f-07a8-423a-9a4d-0c559df93b1d req-7542836c-6731-4632-8aa7-a904e5a5fec8 service nova] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Detach interface failed, port_id=d710ceee-3dc2-4c78-ac1a-742331ebdd5a, reason: Instance 19f84ce0-5ab0-4749-a94a-3dbae0da8757 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1165.037494] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963955, 'name': Rename_Task, 'duration_secs': 0.138366} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.037742] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.037978] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27fbd8aa-342f-4452-a152-d6f1192a2602 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.044386] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1165.044386] env[68279]: value = "task-2963960" [ 1165.044386] env[68279]: _type = "Task" [ 1165.044386] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.054049] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963960, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.058591] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488267} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.059479] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] 79905489-388d-4540-bdff-1c6a02f8bebd/79905489-388d-4540-bdff-1c6a02f8bebd.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1165.059479] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1165.059479] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7da3f661-d661-495b-9038-ca8c76e0cfc4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.064786] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1165.064786] env[68279]: value = "task-2963961" [ 1165.064786] env[68279]: _type = "Task" [ 1165.064786] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.072564] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963961, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.127229] env[68279]: DEBUG oslo_vmware.api [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Task: {'id': task-2963958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105848} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.127850] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1165.128086] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1165.128283] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1165.128461] env[68279]: INFO nova.compute.manager [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1165.128707] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1165.128906] env[68279]: DEBUG nova.compute.manager [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1165.129017] env[68279]: DEBUG nova.network.neutron [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1165.133367] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963959, 'name': PowerOffVM_Task, 'duration_secs': 0.173185} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.133911] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.134064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1165.134297] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da2fbead-7d16-435c-9c6b-c12f57d95f37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.147522] env[68279]: DEBUG nova.network.neutron [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1165.196417] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1165.196606] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1165.196774] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleting the datastore file [datastore1] 2d05e318-abef-43b0-9ad3-8c839c372780 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.197038] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bf704dc-0b26-44ad-86d3-3afa1b474591 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.204058] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for the task: (returnval){ [ 1165.204058] env[68279]: value = "task-2963963" [ 1165.204058] env[68279]: _type = "Task" [ 1165.204058] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.211667] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963963, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.288374] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 4dd80f75-13d0-43d7-8042-b175dff50250] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.424371] env[68279]: INFO nova.compute.manager [-] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Took 1.26 seconds to deallocate network for instance. [ 1165.553739] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.573566] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066184} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.573830] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1165.574584] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0af1d2-76b3-4865-b70c-bf61babc007f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.596057] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 79905489-388d-4540-bdff-1c6a02f8bebd/79905489-388d-4540-bdff-1c6a02f8bebd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.596608] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eae1077e-6a10-4d29-92ea-355db3add1b0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.615965] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1165.615965] env[68279]: value = "task-2963964" [ 1165.615965] env[68279]: _type = "Task" [ 1165.615965] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.622755] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963964, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.650052] env[68279]: DEBUG nova.network.neutron [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.714058] env[68279]: DEBUG oslo_vmware.api [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Task: {'id': task-2963963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157164} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.714351] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1165.714556] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1165.714747] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1165.714924] env[68279]: INFO nova.compute.manager [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1165.715409] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1165.715634] env[68279]: DEBUG nova.compute.manager [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1165.715743] env[68279]: DEBUG nova.network.neutron [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1165.791903] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 1bd92b53-46c0-4b63-be20-857cffed87cd] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.930689] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.930982] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.931223] env[68279]: DEBUG nova.objects.instance [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid 19f84ce0-5ab0-4749-a94a-3dbae0da8757 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.054767] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.125558] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963964, 'name': ReconfigVM_Task, 'duration_secs': 0.360808} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.125558] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 79905489-388d-4540-bdff-1c6a02f8bebd/79905489-388d-4540-bdff-1c6a02f8bebd.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.126168] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9df80ce-d84f-4e16-a527-e89d2b4c0f1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.131674] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1166.131674] env[68279]: value = "task-2963966" [ 1166.131674] env[68279]: _type = "Task" [ 1166.131674] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.141134] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963966, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.152783] env[68279]: INFO nova.compute.manager [-] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Took 1.02 seconds to deallocate network for instance. 
[ 1166.295630] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 0731fdf9-f90c-46a4-9165-f6d91767b51b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.467016] env[68279]: DEBUG nova.compute.manager [req-e4f1263d-80d3-4f87-86d1-2b17d8772bd6 req-88be5db0-d61d-42b7-bda3-064cd480d61b service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Received event network-vif-deleted-633e0597-c529-4822-bb62-9eb5fe78047c {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.467237] env[68279]: INFO nova.compute.manager [req-e4f1263d-80d3-4f87-86d1-2b17d8772bd6 req-88be5db0-d61d-42b7-bda3-064cd480d61b service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Neutron deleted interface 633e0597-c529-4822-bb62-9eb5fe78047c; detaching it from the instance and deleting it from the info cache [ 1166.467408] env[68279]: DEBUG nova.network.neutron [req-e4f1263d-80d3-4f87-86d1-2b17d8772bd6 req-88be5db0-d61d-42b7-bda3-064cd480d61b service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.475048] env[68279]: DEBUG nova.network.neutron [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.556517] env[68279]: DEBUG oslo_vmware.api [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2963960, 'name': PowerOnVM_Task, 'duration_secs': 1.060189} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.556790] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.557100] env[68279]: INFO nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Took 7.64 seconds to spawn the instance on the hypervisor. [ 1166.557176] env[68279]: DEBUG nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.558088] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57319c8-24fc-49f7-91d9-092f4c6723cf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.563181] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1166.563379] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594761', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'name': 'volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'serial': '40f025bf-52f1-4b53-8f40-cf05a0b194d2'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1166.564086] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd174ce-424c-4052-ad9c-ec1a90447a98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.586501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec24714-35b2-480d-b092-66d9310aac93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.610929] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2/volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.612731] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b53324d6-02ea-4ed7-a700-2980a7b883d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.625421] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b1c511-c63c-4b1f-8fa0-2cd9779ca992 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.635878] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9e1cc-3b78-4ccb-8434-be701378883c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.639401] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1166.639401] env[68279]: value = "task-2963967" [ 1166.639401] env[68279]: _type = "Task" [ 1166.639401] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.670617] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963966, 'name': Rename_Task, 'duration_secs': 0.165201} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.671970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.672989] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.673727] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932e4684-29aa-4592-b682-0c63fd3a5c5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.679209] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72810352-776b-4417-afdc-229cb6c1160f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.680770] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963967, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.686723] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd848c5-7b96-4c6f-a7a3-472a43a2bebf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.690571] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1166.690571] env[68279]: value = "task-2963968" [ 1166.690571] env[68279]: _type = "Task" [ 1166.690571] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.701951] env[68279]: DEBUG nova.compute.provider_tree [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.709567] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963968, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.799081] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e6f39528-384c-456b-8155-a6856bab3ce0] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.973251] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9d27a50-e822-4d5f-b1fa-d05a6428d1fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.977085] env[68279]: INFO nova.compute.manager [-] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Took 1.26 seconds to deallocate network for instance. [ 1166.986295] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa2bd37-0575-4f03-96be-f82c30f49e2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.019660] env[68279]: DEBUG nova.compute.manager [req-e4f1263d-80d3-4f87-86d1-2b17d8772bd6 req-88be5db0-d61d-42b7-bda3-064cd480d61b service nova] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Detach interface failed, port_id=633e0597-c529-4822-bb62-9eb5fe78047c, reason: Instance 2d05e318-abef-43b0-9ad3-8c839c372780 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1167.092794] env[68279]: INFO nova.compute.manager [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Took 17.56 seconds to build instance. [ 1167.150622] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963967, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.200128] env[68279]: DEBUG oslo_vmware.api [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2963968, 'name': PowerOnVM_Task, 'duration_secs': 0.44514} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.200411] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.200616] env[68279]: INFO nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Took 7.75 seconds to spawn the instance on the hypervisor. 
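Annotation: the repeated "Task: {'id': task-..., 'name': ...} progress is N%." and "completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task). As an illustration only, the loop below sketches that pattern; get_task_info is a hypothetical callable standing in for the PropertyCollector reads seen in the log, and the poll interval is an assumed value, not Nova's configuration.

    import time

    POLL_INTERVAL = 0.5  # assumed seconds between polls, for illustration only


    def wait_for_task(task_ref, get_task_info):
        """Poll a vCenter task until it finishes.

        ``get_task_info`` is a hypothetical helper returning an object with
        ``state`` ('running', 'success', 'error') and ``progress`` (0-100).
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info          # the log then records 'duration_secs'
            if info.state == 'error':
                raise RuntimeError('task %s failed' % task_ref)
            # Mirrors the "progress is N%." DEBUG lines emitted while waiting.
            print('Task %s progress is %s%%.' % (task_ref, info.progress))
            time.sleep(POLL_INTERVAL)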
[ 1167.200813] env[68279]: DEBUG nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1167.201592] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2f564f-0afd-4440-a0d3-e7956614b43d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.204844] env[68279]: DEBUG nova.scheduler.client.report [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.302279] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 0d8f8797-649e-45de-8b3c-0b47e1d4cdd9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.498665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.594730] env[68279]: DEBUG oslo_concurrency.lockutils [None req-616b0c32-a43d-4ff8-adb5-18ecccbdef07 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.075s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.654074] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963967, 'name': ReconfigVM_Task, 'duration_secs': 0.554062} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.654074] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2/volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.656611] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-126da273-b5c9-4114-94ca-28885d27154a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.672693] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1167.672693] env[68279]: value = "task-2963969" [ 1167.672693] env[68279]: _type = "Task" [ 1167.672693] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.680952] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963969, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.710026] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.712597] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.041s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.713149] env[68279]: DEBUG nova.objects.instance [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lazy-loading 'resources' on Instance uuid 7c9c6661-2e52-4dba-8671-26f69d089903 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1167.725330] env[68279]: INFO nova.compute.manager [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Took 14.89 seconds to build instance. 
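Annotation: the "Acquiring lock ... / acquired ... waited X s / released ... held Y s" entries around "compute_resources" come from oslo.concurrency's lockutils. A minimal usage sketch follows, assuming oslo.concurrency is installed; the lock name matches the log, but the function bodies are placeholders rather than the resource tracker's real code. "waited" corresponds to time spent blocking on entry, "held" to the time until the block exits.

    from oslo_concurrency import lockutils


    # Decorator form: same primitive behind the "compute_resources" messages.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section: in Nova this is where the resource tracker
        # recalculates per-instance usage before reporting to Placement.
        pass


    # Equivalent context-manager form.
    def update_usage_inline():
        with lockutils.lock('compute_resources'):
            pass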
[ 1167.741168] env[68279]: INFO nova.scheduler.client.report [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance 19f84ce0-5ab0-4749-a94a-3dbae0da8757 [ 1167.808679] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 99024851-0add-44b9-a70a-2e242180d6a9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.183121] env[68279]: DEBUG oslo_vmware.api [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963969, 'name': ReconfigVM_Task, 'duration_secs': 0.157076} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.183455] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594761', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'name': 'volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'serial': '40f025bf-52f1-4b53-8f40-cf05a0b194d2'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1168.226754] env[68279]: DEBUG oslo_concurrency.lockutils [None req-51cb9e37-5d7f-4333-bd75-ddc4e2fdbe23 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.402s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.248688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6ac11366-07e2-40d7-9f0b-a89465d83e25 tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "19f84ce0-5ab0-4749-a94a-3dbae0da8757" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.455s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.311814] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 5c8d5c97-2b1c-4e43-86c1-9dfcd170faab] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.379183] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999a25c0-7d78-4f72-8e7f-a97304be8dd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.387533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6605c4b-74da-4204-9899-ff188901401e {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.419224] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5ce968-337d-4c1a-b0ac-b1a60b13c854 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.428798] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa32cfc8-99fc-46e2-a5ea-b9b51970ad6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.442189] env[68279]: DEBUG nova.compute.provider_tree [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.494276] env[68279]: DEBUG nova.compute.manager [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Received event network-changed-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.494469] env[68279]: DEBUG nova.compute.manager [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Refreshing instance network info cache due to event network-changed-6a8f6f79-2136-4be2-a2a5-cb6b32370b13. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1168.494678] env[68279]: DEBUG oslo_concurrency.lockutils [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] Acquiring lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.494820] env[68279]: DEBUG oslo_concurrency.lockutils [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] Acquired lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.496618] env[68279]: DEBUG nova.network.neutron [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Refreshing network info cache for port 6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.822849] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e0afa3e5-4a40-4257-851c-3cccf57b1724] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.945527] env[68279]: DEBUG nova.scheduler.client.report [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.225511] env[68279]: DEBUG nova.objects.instance [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.246915] env[68279]: DEBUG nova.network.neutron [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updated VIF entry in instance network info cache for port 6a8f6f79-2136-4be2-a2a5-cb6b32370b13. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.247350] env[68279]: DEBUG nova.network.neutron [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating instance_info_cache with network_info: [{"id": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "address": "fa:16:3e:3f:b4:50", "network": {"id": "4db621f3-3723-433f-abd0-ea52181c429b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-44941193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d71c92734eeb4a6ea05e4c1116c16161", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a8f6f79-21", "ovs_interfaceid": "6a8f6f79-2136-4be2-a2a5-cb6b32370b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.326596] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: a15e5e09-d4a8-4e4b-af58-abe4d6c28f2e] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.451247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.739s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.453728] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.955s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.453954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.472810] env[68279]: INFO nova.scheduler.client.report [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Deleted allocations for instance 2d05e318-abef-43b0-9ad3-8c839c372780 [ 1169.474679] env[68279]: INFO nova.scheduler.client.report [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Deleted allocations for instance 7c9c6661-2e52-4dba-8671-26f69d089903 [ 1169.731400] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5643954a-8603-4a81-b603-615cf3bd6891 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.794s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.749797] env[68279]: DEBUG oslo_concurrency.lockutils [req-8e340729-68a5-4afa-982b-5d9fb1cb83a3 req-6835088f-444c-4fc5-96af-99085b2fbd40 service nova] Releasing lock "refresh_cache-5827dda0-48a4-4779-b6d2-7fbf73837583" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.772918] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.773198] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.773412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.773596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.773763] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.775998] env[68279]: INFO nova.compute.manager [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Terminating instance [ 1169.830445] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 80d881c8-3363-4cf8-bf16-a715d8739335] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.862135] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.862242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.984498] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8aaee-8cb9-4bc1-81f3-a4228f77f4a8 tempest-DeleteServersTestJSON-586418910 tempest-DeleteServersTestJSON-586418910-project-member] Lock "2d05e318-abef-43b0-9ad3-8c839c372780" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.888s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.985452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-92531a7e-4035-4571-88ea-54406f54f1d7 tempest-ServerShowV247Test-500152011 tempest-ServerShowV247Test-500152011-project-member] Lock "7c9c6661-2e52-4dba-8671-26f69d089903" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.545s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.280020] env[68279]: DEBUG nova.compute.manager [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1170.281055] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1170.281154] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f80d1a-d63b-4127-8a27-069fa78b076d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.291186] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1170.291186] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49f2677c-dcf6-4cb0-9464-fab26d832e44 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.296617] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1170.296617] env[68279]: value = "task-2963970" [ 1170.296617] env[68279]: _type = "Task" [ 1170.296617] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.305931] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963970, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.333594] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 2cdd785d-6758-469f-b1f6-266154853f8c] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.364814] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1170.530275] env[68279]: DEBUG nova.compute.manager [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Received event network-changed-abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.530534] env[68279]: DEBUG nova.compute.manager [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Refreshing instance network info cache due to event network-changed-abfe62ee-1d24-42d6-9928-6425596e2a97. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1170.530820] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] Acquiring lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.531449] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] Acquired lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.531744] env[68279]: DEBUG nova.network.neutron [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Refreshing network info cache for port abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1170.806963] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963970, 'name': PowerOffVM_Task, 'duration_secs': 0.377336} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.807179] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1170.807360] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1170.807600] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e898071d-c1aa-401c-b609-c4c8b550e8f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.836808] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 192734ca-f549-4461-a05a-5f00f0639977] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.875314] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1170.875517] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1170.875920] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f661359d-733b-47ad-bc41-b6a779efc82d 
tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleting the datastore file [datastore2] 7e34039c-c51a-4f9c-961c-144f6d8a5130 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.875920] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f66b3566-4a3a-42b8-90bd-fce1f6deff06 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.882334] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for the task: (returnval){ [ 1170.882334] env[68279]: value = "task-2963973" [ 1170.882334] env[68279]: _type = "Task" [ 1170.882334] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.889796] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.890888] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.891163] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.892618] env[68279]: INFO nova.compute.claims [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.086918] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.087178] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.239909] env[68279]: DEBUG nova.network.neutron [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] [instance: 
79905489-388d-4540-bdff-1c6a02f8bebd] Updated VIF entry in instance network info cache for port abfe62ee-1d24-42d6-9928-6425596e2a97. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.240414] env[68279]: DEBUG nova.network.neutron [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updating instance_info_cache with network_info: [{"id": "abfe62ee-1d24-42d6-9928-6425596e2a97", "address": "fa:16:3e:40:27:65", "network": {"id": "b13a3836-972f-46b8-b092-f10feeabccf3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1545247449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e02764848813428dbe0f88c32ad935ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d19577c9-1b2e-490b-8031-2f278dd3f570", "external-id": "nsx-vlan-transportzone-611", "segmentation_id": 611, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabfe62ee-1d", "ovs_interfaceid": "abfe62ee-1d24-42d6-9928-6425596e2a97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.340111] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 65688756-ad94-437f-9a36-bd7e3f7f7a2b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.391828] env[68279]: DEBUG oslo_vmware.api [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Task: {'id': task-2963973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497255} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.392096] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1171.392315] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1171.392515] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1171.392687] env[68279]: INFO nova.compute.manager [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1171.392929] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1171.393161] env[68279]: DEBUG nova.compute.manager [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1171.393257] env[68279]: DEBUG nova.network.neutron [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1171.590449] env[68279]: DEBUG nova.compute.utils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1171.743411] env[68279]: DEBUG oslo_concurrency.lockutils [req-f2b43ad6-a853-4d21-8909-0235fc40ea71 req-9dad91fe-66b9-4f86-943c-25d1f7497381 service nova] Releasing lock "refresh_cache-79905489-388d-4540-bdff-1c6a02f8bebd" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.846055] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: a30bae8e-ee75-41e4-b1b2-f3dd32ad0e87] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.040614] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6392d17e-93e3-4a86-9a65-5f078b9c10aa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.048272] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e58c04-64f5-4007-a5c9-3971c2ded688 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.078719] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f5f3ca-9859-4ee8-8e54-fad7df0c3a8e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.085934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da9815e-a28a-4e0d-8738-102eac3f0cf9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.099431] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.100093] env[68279]: DEBUG nova.compute.provider_tree [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.128252] env[68279]: DEBUG nova.network.neutron [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.349875] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 19f693cd-b598-432d-acf5-64da9f640d5e] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.556859] env[68279]: DEBUG nova.compute.manager [req-0fd5ee4d-a813-41e3-9d9d-b6955051b3e5 req-c125402e-e906-4772-b5c6-5c006f309e68 service nova] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Received event network-vif-deleted-ad0276f3-cb04-4653-b770-08562e96ba17 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.603914] env[68279]: DEBUG nova.scheduler.client.report [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.630060] env[68279]: INFO nova.compute.manager [-] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Took 1.24 seconds to deallocate network for instance. 
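Annotation: the "Inventory has not changed for provider ... based on inventory data" entries carry the Placement inventory for this compute node. The capacity the scheduler can allocate per resource class is (total - reserved) * allocation_ratio, further constrained per allocation by min_unit, max_unit and step_size. The sketch below only applies that formula to the numbers shown in the log above.

    # Inventory as reported in the log for provider 40ba16cf-8244-4715-b8c1-975029462ee4.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }


    def effective_capacity(inv):
        """Allocatable capacity per resource class: (total - reserved) * ratio."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}


    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}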
[ 1172.853389] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 02f34ac7-9deb-4714-92cb-bb507fde1e74] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.109418] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.109746] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1173.135898] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.136186] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.136420] env[68279]: DEBUG nova.objects.instance [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lazy-loading 'resources' on Instance uuid 7e34039c-c51a-4f9c-961c-144f6d8a5130 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.153185] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.153415] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.153639] env[68279]: INFO nova.compute.manager [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attaching volume 06c77ad4-d714-44ed-9ef4-dbbdf205ab84 to /dev/sdc [ 1173.183698] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-76fb35eb-7b63-4c54-81ba-db2443cda960 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.191239] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c599645d-ca1a-4651-a813-aaa842ede71e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.206890] env[68279]: DEBUG nova.virt.block_device [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating existing volume attachment record: 9cc3ab77-86a5-44c1-87f0-9eea0b4beee7 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1173.356644] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 866eb440-4fc9-4708-8a3b-b53f2be3f6c8] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.614917] env[68279]: DEBUG nova.compute.utils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1173.616519] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1173.616716] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1173.669195] env[68279]: DEBUG nova.policy [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd67d0e35641a4494a5087e0f3abdc767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd1384256d224e80bf6f25b9fd054376', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.791026] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c34743-d22c-4c3b-b734-beef20d49409 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.799961] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16a5bea-b936-422d-9507-43b16c508b73 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.831568] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce2b53a-17e4-4040-ac73-0214221a8b3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.840020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9e522a-ce2d-4edc-ae91-75437aa5e179 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.854122] env[68279]: DEBUG nova.compute.provider_tree [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.861112] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 5cbe4915-5b01-4424-96c8-f3225e512c89] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.956117] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Successfully created port: 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1174.122090] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1174.357365] env[68279]: DEBUG nova.scheduler.client.report [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.363329] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: c2f45a6f-bfe9-40d5-9b77-6536b34ba7d5] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.628990] env[68279]: INFO nova.virt.block_device [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Booting with volume 6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7 at /dev/sda [ 1174.667338] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25241341-ba35-4927-8585-a3d1be1bc743 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.678292] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfdfba6-1a6c-4725-bba2-ed0443718826 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.712939] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-136c42d6-90f9-4487-b18c-843457eb431b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.727313] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fb2a16-ac3d-4606-8514-514483a7261a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.762467] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf14d0d-0115-42c2-bd92-4dab83adc022 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.770738] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c324964-6628-412f-a7a7-610db2323f6d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.784914] env[68279]: DEBUG nova.virt.block_device [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating existing volume attachment record: 7ddf790d-0878-43c1-af70-35dcf2f31e88 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1174.862079] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d 
tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.865624] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 7f54f9a6-3236-44c1-b327-1941dbfa3ff0] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.889836] env[68279]: INFO nova.scheduler.client.report [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Deleted allocations for instance 7e34039c-c51a-4f9c-961c-144f6d8a5130 [ 1175.356484] env[68279]: DEBUG nova.compute.manager [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Received event network-vif-plugged-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.356682] env[68279]: DEBUG oslo_concurrency.lockutils [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.356885] env[68279]: DEBUG oslo_concurrency.lockutils [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] Lock "78b58db9-0616-428d-999c-2f6548008466-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.357068] env[68279]: DEBUG oslo_concurrency.lockutils [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] Lock "78b58db9-0616-428d-999c-2f6548008466-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.357234] env[68279]: DEBUG nova.compute.manager [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] No waiting events found dispatching network-vif-plugged-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1175.357386] env[68279]: WARNING nova.compute.manager [req-69a4624c-f49d-44b5-a13c-bbf8701d517f req-f8bbbfde-c718-4f74-8f83-92bc5f5ed80f service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Received unexpected event network-vif-plugged-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe for instance with vm_state building and task_state block_device_mapping. 
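(Annotation, not part of the captured log: the "Acquiring lock ... by ...", "acquired ... waited 0.000s" and "released ... held N.NNNs" lines above come from oslo.concurrency's named in-process locks, which Nova wraps around sections such as "compute_resources" and the per-instance event queues. A minimal sketch of that pattern follows; the function name update_usage and the argument values are placeholders for illustration only, not the Nova code itself.)

    # Hedged sketch: guarding a critical section with a named oslo.concurrency lock.
    # The synchronized decorator's inner wrapper is what emits the DEBUG lines of the
    # form "Acquiring lock ... by ..." / "Lock ... acquired ... waited Ns" /
    # "released ... held Ns" seen in the log above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Runs only while the in-process "compute_resources" semaphore is held;
        # time other callers spend blocked here shows up as "waited N.NNNs".
        print('updating usage for %s' % instance_uuid)

    update_usage('7e34039c-c51a-4f9c-961c-144f6d8a5130')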
[ 1175.368916] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 7d15a05a-f827-40a7-b182-5d2b553481c7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.398976] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f661359d-733b-47ad-bc41-b6a779efc82d tempest-ServersTestJSON-148746370 tempest-ServersTestJSON-148746370-project-member] Lock "7e34039c-c51a-4f9c-961c-144f6d8a5130" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.626s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.446784] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Successfully updated port: 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.871936] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 8aa8c866-4807-4a06-904e-53c149047d65] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.949224] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.949414] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.949597] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1176.375183] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 777eda1c-ca3f-4db0-b6b9-5901de5781ff] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.490224] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1176.692179] env[68279]: DEBUG nova.network.neutron [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.868094] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1176.868756] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.869021] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.869187] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.869373] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.869519] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.869664] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.869866] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.873300] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.873300] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies 
{{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.873300] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.873300] env[68279]: DEBUG nova.virt.hardware [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.873300] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96aa8689-59c0-4f00-ab4b-3eb9f1798811 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.878364] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 1ae768c9-3e20-4dee-bdd3-35d7c7d878e4] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.884444] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e44de1c-d7ba-4924-99aa-dafc96eb7eaa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.194716] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.195077] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Instance network_info: |[{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1177.195512] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:21:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.204984] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.204984] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.205231] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfb2075b-f636-40f9-8cf2-2d7e22d5fdfa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.227413] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.227413] env[68279]: value = "task-2963976" [ 1177.227413] env[68279]: _type = "Task" [ 1177.227413] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.236328] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963976, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.384454] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 97c3000f-a3d8-45c1-b0a4-12eb2b22b572] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1177.389332] env[68279]: DEBUG nova.compute.manager [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Received event network-changed-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.389519] env[68279]: DEBUG nova.compute.manager [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Refreshing instance network info cache due to event network-changed-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.390067] env[68279]: DEBUG oslo_concurrency.lockutils [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.390067] env[68279]: DEBUG oslo_concurrency.lockutils [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.390067] env[68279]: DEBUG nova.network.neutron [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Refreshing network info cache for port 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.737707] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2963976, 'name': CreateVM_Task, 'duration_secs': 0.353385} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.737894] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1177.738693] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594759', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'name': 'volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78b58db9-0616-428d-999c-2f6548008466', 'attached_at': '', 'detached_at': '', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'serial': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7'}, 'attachment_id': '7ddf790d-0878-43c1-af70-35dcf2f31e88', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=68279) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1177.738925] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Root volume attach. 
Driver type: vmdk {{(pid=68279) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1177.739758] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367b9790-0748-40cc-8ff2-defbec232aa2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.749768] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Volume attach. Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1177.750008] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594762', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'name': 'volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'serial': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1177.750770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aed7ef4-8e02-4483-81a9-235f28a27aac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.753792] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1842451e-31f1-46a1-80a5-985987fda87b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.770644] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da70d40-1d73-4ad4-9835-4e40a40bd85d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.774448] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167a6eb4-4b8e-4cb1-a6da-41fdf4be28dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.796910] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-fcaf7182-050c-49a5-a8af-905ba4cb918a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.808839] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84/volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.808839] env[68279]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d619ab7-f4e1-4034-aa1c-c36b58a59017 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.827916] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1177.827916] env[68279]: value = "task-2963978" [ 1177.827916] env[68279]: _type = "Task" [ 1177.827916] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.829368] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1177.829368] env[68279]: value = "task-2963977" [ 1177.829368] env[68279]: _type = "Task" [ 1177.829368] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.841476] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.845470] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.887473] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 67466e30-5944-490c-a89b-2d32c59525be] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.146050] env[68279]: DEBUG nova.network.neutron [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updated VIF entry in instance network info cache for port 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.146402] env[68279]: DEBUG nova.network.neutron [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.345903] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 42%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.349423] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963978, 'name': ReconfigVM_Task, 'duration_secs': 0.446998} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.349732] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84/volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.355135] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f2a0f85-4427-4cc8-a8b6-a7efe0898770 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.376502] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1178.376502] env[68279]: value = "task-2963979" [ 1178.376502] env[68279]: _type = "Task" [ 1178.376502] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.389621] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963979, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.391368] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: daccaa30-1011-4c7d-a668-05f9329ab4d5] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.649715] env[68279]: DEBUG oslo_concurrency.lockutils [req-6ca29eb3-9f0c-41a9-bc4a-c121edc864cc req-82f98fb8-46a6-4aa3-bf09-a70352de10b0 service nova] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.848498] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 56%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.891106] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963979, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.894683] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: b45f310f-e614-47db-9f6e-f35dd481137c] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1179.350827] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 71%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.391935] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963979, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.397735] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e3763645-5a78-4929-98a3-108e72071211] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1179.849405] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 86%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.891812] env[68279]: DEBUG oslo_vmware.api [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963979, 'name': ReconfigVM_Task, 'duration_secs': 1.160263} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.892258] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594762', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'name': 'volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'serial': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1179.900490] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 0b0cb2c2-d9b0-4d2c-a4f9-1cde94e4169a] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.350185] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 97%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.405127] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: c62a0d0e-8869-482a-a687-c628b96d6e22] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.848494] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.908263] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: d61b2c4f-942a-4e29-8cac-11bc0750605a] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.929013] env[68279]: DEBUG nova.objects.instance [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.348673] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task} progress is 98%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.411307] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: cfaee7e2-6929-4d8c-8614-e19e0055f2fb] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.433995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-58edad34-a71b-49d7-a80f-47ec0a87b3e4 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.280s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.769937] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.770153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.849145] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963977, 'name': RelocateVM_Task, 'duration_secs': 3.637199} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.849414] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1181.849615] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594759', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'name': 'volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78b58db9-0616-428d-999c-2f6548008466', 'attached_at': '', 'detached_at': '', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'serial': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1181.850377] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e4cca9-9127-4767-b489-5baab1fcf23b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.865964] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203040d0-9173-45e8-8eb3-13a3904454cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.887657] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7/volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.887915] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbe0e09a-7f83-447a-93f9-75a18bdf9571 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.908427] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1181.908427] env[68279]: value = "task-2963980" [ 1181.908427] env[68279]: _type = "Task" [ 1181.908427] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.917297] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: fe92e176-222c-4c46-a254-1c12e21c68d0] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.919222] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963980, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.273465] env[68279]: INFO nova.compute.manager [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Detaching volume 40f025bf-52f1-4b53-8f40-cf05a0b194d2 [ 1182.304111] env[68279]: INFO nova.virt.block_device [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attempting to driver detach volume 40f025bf-52f1-4b53-8f40-cf05a0b194d2 from mountpoint /dev/sdb [ 1182.304355] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1182.304564] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594761', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'name': 'volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'serial': '40f025bf-52f1-4b53-8f40-cf05a0b194d2'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1182.305450] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbaca809-9be6-4e8c-a71f-34d3a94c0367 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.331190] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c284fd-e252-4ad1-aadf-c87b42e59899 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.338628] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85032ebe-83ae-40a9-beb3-7b77d2c07cc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.361276] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f173a0-0385-41e1-a8f8-60cd0e0dda56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.376681] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] The volume has not been displaced from its original location: [datastore2] volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2/volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1182.381831] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1182.382099] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58fd7fef-c876-44be-b7cd-1889821f93ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.399949] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1182.399949] env[68279]: value = "task-2963981" [ 1182.399949] env[68279]: _type = "Task" [ 1182.399949] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.407871] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963981, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.416864] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963980, 'name': ReconfigVM_Task, 'duration_secs': 0.279765} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.417138] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7/volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.422446] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 665d932d-1068-4bb2-835c-2184a80753d1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.424277] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3423c610-bca3-4551-bcaf-51de28bc89ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.440711] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1182.440711] env[68279]: value = "task-2963982" [ 1182.440711] env[68279]: _type = "Task" [ 1182.440711] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.450147] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.910284] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963981, 'name': ReconfigVM_Task, 'duration_secs': 0.281578} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.910575] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1182.915185] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2758d788-04c8-4825-9c24-7607c3ccd4b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.930862] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1182.930862] env[68279]: value = "task-2963983" [ 1182.930862] env[68279]: _type = "Task" [ 1182.930862] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.934652] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: b2e272b3-520a-4ef7-8141-a9d55739d6b9] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.939562] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963983, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.950682] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963982, 'name': ReconfigVM_Task, 'duration_secs': 0.134475} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.950855] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594759', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'name': 'volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78b58db9-0616-428d-999c-2f6548008466', 'attached_at': '', 'detached_at': '', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'serial': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1182.951266] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8dfe9c40-cf71-4670-8996-ccefb28ddea0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.959981] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1182.959981] env[68279]: value = "task-2963984" [ 1182.959981] env[68279]: _type = "Task" [ 1182.959981] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.969038] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963984, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.442530] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 015e0d33-8bab-4ba7-8eb8-5a2c3fee8f03] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1183.444279] env[68279]: DEBUG oslo_vmware.api [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963983, 'name': ReconfigVM_Task, 'duration_secs': 0.142472} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.444545] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594761', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'name': 'volume-40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '40f025bf-52f1-4b53-8f40-cf05a0b194d2', 'serial': '40f025bf-52f1-4b53-8f40-cf05a0b194d2'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1183.469565] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963984, 'name': Rename_Task, 'duration_secs': 0.141796} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.470413] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1183.470654] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48ac923b-5410-4380-870d-f9d3f62b8291 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.480331] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1183.480331] env[68279]: value = "task-2963985" [ 1183.480331] env[68279]: _type = "Task" [ 1183.480331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.487948] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963985, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.947522] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: f7db383a-648a-4984-ae25-72bc2ccfe369] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1183.985461] env[68279]: DEBUG nova.objects.instance [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.993101] env[68279]: DEBUG oslo_vmware.api [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963985, 'name': PowerOnVM_Task, 'duration_secs': 0.4374} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.993374] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.993609] env[68279]: INFO nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Took 7.13 seconds to spawn the instance on the hypervisor. [ 1183.993791] env[68279]: DEBUG nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.994570] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255d0930-f08a-47df-927d-7be3ce81c3bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.451319] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 932663fb-ea20-48d2-b6e8-2d3b32bbdd8e] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.512272] env[68279]: INFO nova.compute.manager [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Took 13.64 seconds to build instance. 
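The records above follow the driver's invoke-then-poll pattern: a vSphere task such as Rename_Task or PowerOnVM_Task is started, and oslo.vmware polls the returned task object until it reports "completed successfully". The following is a minimal sketch of that pattern using oslo.vmware's public session API, not Nova's own code; the vCenter hostname and credentials are placeholders, and the instance UUID is copied from the log purely for illustration.

    # Sketch of the start-task-then-wait pattern seen in the log above.
    # Assumptions: a reachable vCenter; 'vc.example.com'/'admin'/'secret'
    # are placeholder credentials, not values from this deployment.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.com', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID, as SearchIndex.FindAllByUuid does
    # in the later log records.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='78b58db9-0616-428d-999c-2f6548008466',
        vmSearch=True, instanceUuid=True)

    if vm_refs:
        # Start PowerOnVM_Task and block while oslo.vmware polls it;
        # this polling produces the "progress is 0%" and
        # "completed successfully" records seen above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
        session.wait_for_task(task)
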
[ 1184.954843] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: b1e2f4a1-a8a0-4c4f-8a09-e676e365b48e] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.992148] env[68279]: DEBUG oslo_concurrency.lockutils [None req-dbd2d6a5-1f08-475a-b0b9-f1f31ae23600 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.222s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.013109] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.013368] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.014702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ceaa9b6a-319b-4b48-9f62-1b331675a64f tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.152s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.462025] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 95f0aeaa-75ab-4fd9-b28d-e43703429167] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1185.515825] env[68279]: INFO nova.compute.manager [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Detaching volume 06c77ad4-d714-44ed-9ef4-dbbdf205ab84 [ 1185.554717] env[68279]: INFO nova.virt.block_device [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Attempting to driver detach volume 06c77ad4-d714-44ed-9ef4-dbbdf205ab84 from mountpoint /dev/sdc [ 1185.554948] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1185.555151] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594762', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'name': 'volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'serial': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1185.556072] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6cd692-2207-4887-97d0-25a1e0418101 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.594115] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3152251-5fce-46b5-8ec6-12ea395386cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.610686] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6a1f5a-6025-4104-8b39-4ddf98cb92d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.640844] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4dda22-aae8-47af-9296-948730d3cebf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.657913] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] The volume has not been displaced from its original location: [datastore1] volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84/volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1185.663179] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfiguring VM instance instance-0000006c to detach disk 2002 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1185.663405] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd4bdef-3be2-429a-86de-40fe875690e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.682656] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1185.682656] env[68279]: value = "task-2963986" [ 1185.682656] env[68279]: _type = "Task" [ 1185.682656] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.691492] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963986, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.721984] env[68279]: DEBUG nova.compute.manager [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1185.722265] env[68279]: DEBUG nova.compute.manager [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing instance network info cache due to event network-changed-7dcc683d-e5ad-49a8-8e28-a1af77590026. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1185.722573] env[68279]: DEBUG oslo_concurrency.lockutils [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] Acquiring lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.722722] env[68279]: DEBUG oslo_concurrency.lockutils [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] Acquired lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.722877] env[68279]: DEBUG nova.network.neutron [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Refreshing network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.961934] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: eccc5882-2c8b-456d-bbd2-d9ed22777a77] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1186.105201] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.105443] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.105645] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.105817] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.105978] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.108121] env[68279]: INFO nova.compute.manager [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Terminating instance [ 1186.194073] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963986, 'name': ReconfigVM_Task, 'duration_secs': 0.21542} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.194369] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Reconfigured VM instance instance-0000006c to detach disk 2002 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1186.198974] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c742c2f0-be7b-457c-aa41-9b653f03274c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.215594] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1186.215594] env[68279]: value = "task-2963987" [ 1186.215594] env[68279]: _type = "Task" [ 1186.215594] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.223947] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963987, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.321610] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.321926] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.322187] env[68279]: INFO nova.compute.manager [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Rebooting instance [ 1186.434379] env[68279]: DEBUG nova.network.neutron [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updated VIF entry in instance network info cache for port 7dcc683d-e5ad-49a8-8e28-a1af77590026. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.434758] env[68279]: DEBUG nova.network.neutron [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [{"id": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "address": "fa:16:3e:f1:ad:db", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcc683d-e5", "ovs_interfaceid": "7dcc683d-e5ad-49a8-8e28-a1af77590026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.465216] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 336b7399-b64e-411f-99bc-ba0d292e371a] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1186.612021] env[68279]: DEBUG nova.compute.manager [None 
req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.612267] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.613179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049a6396-b84b-4c2a-8dcb-1bc26e419793 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.621526] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.621769] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77fee027-8d10-4bc1-bb25-bd4fcad73cd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.628962] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1186.628962] env[68279]: value = "task-2963988" [ 1186.628962] env[68279]: _type = "Task" [ 1186.628962] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.637085] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.726875] env[68279]: DEBUG oslo_vmware.api [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963987, 'name': ReconfigVM_Task, 'duration_secs': 0.143889} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.727194] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594762', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'name': 'volume-06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '11bbfd41-52bb-410c-b368-1473a309d6a7', 'attached_at': '', 'detached_at': '', 'volume_id': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84', 'serial': '06c77ad4-d714-44ed-9ef4-dbbdf205ab84'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1186.847899] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.847899] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.847899] env[68279]: DEBUG nova.network.neutron [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1186.937504] env[68279]: DEBUG oslo_concurrency.lockutils [req-a5d0f82b-c1d1-42c9-b37c-0d9a72a13b76 req-93ded9a4-87a2-4ecc-801c-52b1c4d1fd91 service nova] Releasing lock "refresh_cache-298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.968512] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 6f461623-93a7-4cb9-9ae6-a9fe15c3d7d1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1187.140441] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963988, 'name': PowerOffVM_Task, 'duration_secs': 0.204539} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.140785] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.140935] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.141241] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-083b1fd6-c9a1-40f4-b713-f2f81f9e14c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.174378] env[68279]: DEBUG nova.compute.manager [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1187.225592] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.225855] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.226063] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleting the datastore file [datastore1] dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.226380] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a78ae15-4dbe-406d-8dbe-9ea7bb8317d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.237058] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for the task: (returnval){ [ 1187.237058] env[68279]: value = "task-2963990" [ 1187.237058] env[68279]: _type = "Task" [ 1187.237058] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.246230] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.274377] env[68279]: DEBUG nova.objects.instance [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'flavor' on Instance uuid 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.472081] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 1d0f383f-6bf9-42d0-b6c6-1f276eb181cb] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1187.558955] env[68279]: DEBUG nova.network.neutron [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.693412] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.693917] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.749041] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.751182] env[68279]: DEBUG nova.compute.manager [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Received event network-changed-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1187.751367] env[68279]: DEBUG nova.compute.manager [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Refreshing instance network info cache due to event network-changed-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1187.751573] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.751713] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.751869] env[68279]: DEBUG nova.network.neutron [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Refreshing network info cache for port 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1187.976802] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 50e08259-7915-49bb-b137-5cc6e9d53c16] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1188.061442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.198888] env[68279]: INFO nova.compute.claims [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.249284] env[68279]: DEBUG oslo_vmware.api [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Task: {'id': task-2963990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.64139} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.249474] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.249655] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.249827] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.249996] env[68279]: INFO nova.compute.manager [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1188.250260] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.250454] env[68279]: DEBUG nova.compute.manager [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.250547] env[68279]: DEBUG nova.network.neutron [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.280970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b69d569-15e8-41ac-9ef9-23b3bbdb66c7 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.267s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.468356] env[68279]: DEBUG nova.network.neutron [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updated VIF entry in instance network info cache for port 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1188.468701] env[68279]: DEBUG nova.network.neutron [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.480032] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.480032] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1188.566075] env[68279]: DEBUG nova.compute.manager [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1188.566959] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9503743b-7f9c-4250-aebf-cd5954494ee4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.705125] env[68279]: INFO nova.compute.resource_tracker [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating resource usage from migration 0c9a561b-4ed1-436e-935b-95e2a146d961 [ 1188.723020] env[68279]: DEBUG nova.scheduler.client.report [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1188.740594] env[68279]: DEBUG nova.scheduler.client.report [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1188.740862] env[68279]: DEBUG nova.compute.provider_tree [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1188.751948] env[68279]: DEBUG nova.scheduler.client.report [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1188.773783] env[68279]: DEBUG nova.scheduler.client.report [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1188.914248] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37376285-a574-40f3-b4b0-8de583214638 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.924480] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860c8308-d7e4-44a1-a1cb-87b4fe3ced1a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.960959] env[68279]: DEBUG nova.compute.manager [req-f0c6db51-bb82-4ae7-97d2-29efaf2aef48 req-4ee3d45f-229d-4c43-8652-081f1c773ba1 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Received event network-vif-deleted-343369ce-f2d1-401a-9a78-b72854001a75 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.961185] env[68279]: INFO nova.compute.manager [req-f0c6db51-bb82-4ae7-97d2-29efaf2aef48 
req-4ee3d45f-229d-4c43-8652-081f1c773ba1 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Neutron deleted interface 343369ce-f2d1-401a-9a78-b72854001a75; detaching it from the instance and deleting it from the info cache [ 1188.961348] env[68279]: DEBUG nova.network.neutron [req-f0c6db51-bb82-4ae7-97d2-29efaf2aef48 req-4ee3d45f-229d-4c43-8652-081f1c773ba1 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.963160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.963406] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.963610] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.963791] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.964085] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.966184] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbefc889-3877-459d-b261-f8fbc71ffd36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.969201] env[68279]: INFO nova.compute.manager [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Terminating instance [ 1188.971593] env[68279]: DEBUG oslo_concurrency.lockutils [req-cf6cd5ee-085b-449e-bac8-48eab938653b req-eb2792cd-fda8-4595-81b7-493fb1e095f8 service 
nova] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.979452] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca5dea7-3494-47ba-8b40-a8156792d139 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.996259] env[68279]: DEBUG nova.compute.provider_tree [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.419911] env[68279]: DEBUG nova.network.neutron [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.464481] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75d344aa-569a-4a94-943b-f7922cc85a25 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.474496] env[68279]: DEBUG nova.compute.manager [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1189.474712] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1189.475599] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b45ad41-2c4d-4cb8-a824-9a9091ab6a70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.480815] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4937708-8d23-44f9-ac9e-da09fa96941b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.497827] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1189.498715] env[68279]: DEBUG nova.scheduler.client.report [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.501759] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0317fef-51cd-4be6-8fb9-a624c7c2e5d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.510356] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1189.510356] env[68279]: value = "task-2963991" [ 1189.510356] env[68279]: _type = "Task" [ 1189.510356] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.521562] env[68279]: DEBUG nova.compute.manager [req-f0c6db51-bb82-4ae7-97d2-29efaf2aef48 req-4ee3d45f-229d-4c43-8652-081f1c773ba1 service nova] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Detach interface failed, port_id=343369ce-f2d1-401a-9a78-b72854001a75, reason: Instance dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1189.532142] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.583899] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5186cb9-ebe1-4e54-a537-f87c60d8aa14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.592514] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Doing hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1189.592840] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-cd74a56f-fec6-468e-8a7e-ac6d25dab84a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.600512] env[68279]: DEBUG oslo_vmware.api [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1189.600512] env[68279]: value = "task-2963992" [ 1189.600512] env[68279]: _type = "Task" [ 1189.600512] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.609832] env[68279]: DEBUG oslo_vmware.api [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963992, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.923166] env[68279]: INFO nova.compute.manager [-] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Took 1.67 seconds to deallocate network for instance. [ 1190.004700] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.311s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.005116] env[68279]: INFO nova.compute.manager [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Migrating [ 1190.031880] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963991, 'name': PowerOffVM_Task, 'duration_secs': 0.198415} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.032148] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1190.032318] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1190.032581] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3139c16-71b2-4d7a-ba33-14e400198cee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.099969] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1190.100123] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1190.100421] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleting the datastore file [datastore1] 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1190.100790] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-7d6eefaf-71c8-4ba7-b21c-7f0f19b5c8cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.112582] env[68279]: DEBUG oslo_vmware.api [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2963992, 'name': ResetVM_Task, 'duration_secs': 0.09935} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.114146] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Did hard reboot of VM {{(pid=68279) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1190.114370] env[68279]: DEBUG nova.compute.manager [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.114712] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for the task: (returnval){ [ 1190.114712] env[68279]: value = "task-2963994" [ 1190.114712] env[68279]: _type = "Task" [ 1190.114712] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.115416] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15da294-51b8-440d-a344-53cd775af43a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.127328] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.430097] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.430392] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.430617] env[68279]: DEBUG nova.objects.instance [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lazy-loading 'resources' on Instance uuid dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.519549] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.519777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.519965] env[68279]: DEBUG nova.network.neutron [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.628935] env[68279]: DEBUG oslo_vmware.api [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Task: {'id': task-2963994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132383} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.629219] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.629379] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1190.629556] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1190.629729] env[68279]: INFO nova.compute.manager [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1190.629976] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.630188] env[68279]: DEBUG nova.compute.manager [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1190.630280] env[68279]: DEBUG nova.network.neutron [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1190.635442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-54e8d253-4f3f-4f7e-9e20-ae579a29546e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.313s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.075409] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81340178-bd37-498a-90e6-0faf9f7379c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.083959] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d986871c-c7bf-4199-b3b6-52920558a555 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.118791] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf3b77d-6974-4346-8c55-ad7ae691ea1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.129992] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210fb09e-5e1c-452b-82a3-c8aea197aa58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.148569] env[68279]: DEBUG nova.compute.provider_tree [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.151919] env[68279]: DEBUG nova.compute.manager [req-c083a788-aa9d-4658-a080-a685352c4594 req-3c28837b-0800-482f-9787-6564ab43d058 service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Received event network-vif-deleted-03be8849-6f9f-415f-b7fb-ccc79a5734fd {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.152036] env[68279]: INFO nova.compute.manager [req-c083a788-aa9d-4658-a080-a685352c4594 req-3c28837b-0800-482f-9787-6564ab43d058 service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Neutron deleted interface 03be8849-6f9f-415f-b7fb-ccc79a5734fd; detaching it from the instance and deleting it from the info cache [ 1191.152207] env[68279]: DEBUG nova.network.neutron [req-c083a788-aa9d-4658-a080-a685352c4594 req-3c28837b-0800-482f-9787-6564ab43d058 service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.266861] 
env[68279]: DEBUG nova.network.neutron [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.624660] env[68279]: DEBUG nova.network.neutron [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.654511] env[68279]: DEBUG nova.scheduler.client.report [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.658081] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f28fa45d-4a58-41ea-af45-18b3bfcc10a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.669526] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b2a3c3-ea48-4e2b-9079-ffce55c781ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.703481] env[68279]: DEBUG nova.compute.manager [req-c083a788-aa9d-4658-a080-a685352c4594 req-3c28837b-0800-482f-9787-6564ab43d058 service nova] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Detach interface failed, port_id=03be8849-6f9f-415f-b7fb-ccc79a5734fd, reason: Instance 11bbfd41-52bb-410c-b368-1473a309d6a7 could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1191.769529] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.128150] env[68279]: INFO nova.compute.manager [-] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Took 1.50 seconds to deallocate network for instance. [ 1192.163234] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.188646] env[68279]: INFO nova.scheduler.client.report [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Deleted allocations for instance dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba [ 1192.634540] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.634830] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.635057] env[68279]: DEBUG nova.objects.instance [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lazy-loading 'resources' on Instance uuid 11bbfd41-52bb-410c-b368-1473a309d6a7 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.696057] env[68279]: DEBUG oslo_concurrency.lockutils [None req-30539696-ca3b-4bc1-9348-b3cf6b3b585f tempest-AttachVolumeShelveTestJSON-1919725419 tempest-AttachVolumeShelveTestJSON-1919725419-project-member] Lock "dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.590s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.251637] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c360c1a1-6810-425d-95f7-55388ed2894e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.259900] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a75d41-0cf4-4423-9bc4-eb70765aa414 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.292128] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fa6517-c923-4429-a1a2-c4b697432a4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.295224] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c953b7f1-2330-4eac-8d40-af2950e31bb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.314894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b746e696-c238-4bc1-b133-02105cf2cade {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.318698] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.331611] env[68279]: DEBUG nova.compute.provider_tree [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.716074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.716074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.716353] env[68279]: DEBUG nova.objects.instance [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.825600] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.825843] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fea67516-726e-404e-8d47-af314bd19f8f {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.834273] env[68279]: DEBUG nova.scheduler.client.report [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.837202] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1193.837202] env[68279]: value = "task-2963995" [ 1193.837202] env[68279]: _type = "Task" [ 1193.837202] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.854175] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.220539] env[68279]: DEBUG nova.objects.instance [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.338917] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.350062] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963995, 'name': PowerOffVM_Task, 'duration_secs': 0.19157} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.350336] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.350520] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1194.363776] env[68279]: INFO nova.scheduler.client.report [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Deleted allocations for instance 11bbfd41-52bb-410c-b368-1473a309d6a7 [ 1194.723325] env[68279]: DEBUG nova.objects.base [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1194.723651] env[68279]: DEBUG nova.network.neutron [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1194.804765] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e56dd7f-4add-4cc3-ba0c-d41e190ebd86 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.089s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.857712] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.857953] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.858104] env[68279]: DEBUG 
nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.858290] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.858435] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.858586] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.858869] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.859056] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.859229] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.859392] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.859565] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.864566] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33a7652f-e269-461d-9b8b-026d08472017 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.877804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d0f1a5cd-d25c-4083-952d-74f2af80ebf6 tempest-AttachVolumeTestJSON-307007662 tempest-AttachVolumeTestJSON-307007662-project-member] Lock "11bbfd41-52bb-410c-b368-1473a309d6a7" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.914s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.883481] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1194.883481] env[68279]: value = "task-2963997" [ 1194.883481] env[68279]: _type = "Task" [ 1194.883481] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.892278] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963997, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.396935] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.894429] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963997, 'name': ReconfigVM_Task, 'duration_secs': 0.611934} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.894716] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.401044] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.401390] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.401438] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e 
tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.401646] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.401802] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.401946] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.402159] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.402330] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.402501] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.402659] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.402831] env[68279]: DEBUG nova.virt.hardware [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.408136] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1196.408410] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ab86862-0083-4549-9c1c-e736823b93a4 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.435215] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1196.435215] env[68279]: value = "task-2963999" [ 1196.435215] env[68279]: _type = "Task" [ 1196.435215] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.443121] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963999, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.757156] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.757471] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.757897] env[68279]: DEBUG nova.objects.instance [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.945874] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2963999, 'name': ReconfigVM_Task, 'duration_secs': 0.152088} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.946187] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.946970] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90df2fa-9589-4e41-9e96-80f336859c5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.968728] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7/volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1196.968999] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8c8dfc2-a29c-4283-b3df-e82541b6cc8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.986947] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1196.986947] env[68279]: value = "task-2964000" [ 1196.986947] env[68279]: _type = "Task" [ 1196.986947] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.994740] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.376664] env[68279]: DEBUG nova.objects.instance [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.497299] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964000, 'name': ReconfigVM_Task, 'duration_secs': 0.250982} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.497672] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7/volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1197.497831] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.880086] env[68279]: DEBUG nova.objects.base [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.880086] env[68279]: DEBUG nova.network.neutron [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.916847] env[68279]: DEBUG nova.policy [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1198.004405] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c27d687-1023-4fe8-82eb-648fac8f3f1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.024303] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec555c1-f1d9-42c2-bf8c-06386644da99 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.044372] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.194375] env[68279]: DEBUG nova.network.neutron [None 
req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Successfully created port: 8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1199.555046] env[68279]: DEBUG nova.compute.manager [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-plugged-8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.555307] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.555473] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.555642] env[68279]: DEBUG oslo_concurrency.lockutils [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.555877] env[68279]: DEBUG nova.compute.manager [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] No waiting events found dispatching network-vif-plugged-8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1199.556086] env[68279]: WARNING nova.compute.manager [req-fdddd797-2e9b-419d-8292-313572cd69cb req-3fa3fb8e-dfd0-499c-b107-03aaed6feff0 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received unexpected event network-vif-plugged-8fca9b89-a21f-432d-acfb-74c144eba872 for instance with vm_state active and task_state None. 
[ 1199.640294] env[68279]: DEBUG nova.network.neutron [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Successfully updated port: 8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1199.694453] env[68279]: DEBUG nova.network.neutron [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Port 5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1200.143457] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.143701] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.143877] env[68279]: DEBUG nova.network.neutron [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.689012] env[68279]: WARNING nova.network.neutron [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. 
ignoring it [ 1200.715139] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.715354] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.715512] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.970497] env[68279]: DEBUG nova.network.neutron [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8fca9b89-a21f-432d-acfb-74c144eba872", "address": "fa:16:3e:2f:e2:1e", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fca9b89-a2", "ovs_interfaceid": "8fca9b89-a21f-432d-acfb-74c144eba872", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.473641] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.474410] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.474615] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.475476] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431e1c54-2796-4166-9505-890d7ae18cb8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.494133] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.494348] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.494521] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.494717] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.494867] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.495033] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.495240] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.495397] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.495562] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.495724] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.495897] env[68279]: DEBUG nova.virt.hardware [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.502065] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfiguring VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1201.502357] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-131744a9-384d-48ee-a6e7-0171010a834e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.518749] env[68279]: DEBUG oslo_vmware.api [None 
req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1201.518749] env[68279]: value = "task-2964003" [ 1201.518749] env[68279]: _type = "Task" [ 1201.518749] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.525966] env[68279]: DEBUG oslo_vmware.api [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964003, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.581527] env[68279]: DEBUG nova.compute.manager [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-changed-8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.581722] env[68279]: DEBUG nova.compute.manager [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing instance network info cache due to event network-changed-8fca9b89-a21f-432d-acfb-74c144eba872. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1201.581937] env[68279]: DEBUG oslo_concurrency.lockutils [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.582093] env[68279]: DEBUG oslo_concurrency.lockutils [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.582257] env[68279]: DEBUG nova.network.neutron [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing network info cache for port 8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1201.756552] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.756840] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.756939] env[68279]: DEBUG nova.network.neutron [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 
78b58db9-0616-428d-999c-2f6548008466] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.029970] env[68279]: DEBUG oslo_vmware.api [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.281247] env[68279]: DEBUG nova.network.neutron [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updated VIF entry in instance network info cache for port 8fca9b89-a21f-432d-acfb-74c144eba872. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1202.281699] env[68279]: DEBUG nova.network.neutron [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8fca9b89-a21f-432d-acfb-74c144eba872", "address": "fa:16:3e:2f:e2:1e", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fca9b89-a2", "ovs_interfaceid": "8fca9b89-a21f-432d-acfb-74c144eba872", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.452910] env[68279]: DEBUG nova.network.neutron [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.528694] env[68279]: DEBUG oslo_vmware.api [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964003, 'name': ReconfigVM_Task, 'duration_secs': 0.605699} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.529176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.529388] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfigured VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1202.784538] env[68279]: DEBUG oslo_concurrency.lockutils [req-18b710a5-dab8-40f5-8d6d-a688f00c79b3 req-9349437e-5eef-4abb-be05-7313da79eb35 service nova] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.956365] env[68279]: DEBUG oslo_concurrency.lockutils [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.033682] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5538f160-a287-40a7-bf2e-560240797d0a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.276s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.465401] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79e897f-df4e-4ecf-aac2-0e15b05a6513 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.475020] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263120fe-dde7-4637-aac6-e9fd9b7d0323 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.591501] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e0d0ef-0266-4ce5-be6b-819d06d87b3e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.611491] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455ed640-adcc-4dc3-a42d-2525125dfb1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.619826] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} 
[ 1204.828864] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-ef908c81-5301-4ed1-91cd-045d037f3909" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.828864] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-ef908c81-5301-4ed1-91cd-045d037f3909" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.829123] env[68279]: DEBUG nova.objects.instance [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.863461] env[68279]: INFO nova.compute.manager [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Rebuilding instance [ 1204.905285] env[68279]: DEBUG nova.compute.manager [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1204.906152] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376d3b32-afa8-4bcc-b31b-a6ff8f998d65 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.128128] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1205.128451] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08073d4f-8185-4e95-b9de-29994a0325cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.136305] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1205.136305] env[68279]: value = "task-2964004" [ 1205.136305] env[68279]: _type = "Task" [ 1205.136305] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.143688] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964004, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.450166] env[68279]: DEBUG nova.objects.instance [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.648024] env[68279]: DEBUG oslo_vmware.api [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964004, 'name': PowerOnVM_Task, 'duration_secs': 0.380811} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.648024] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1205.648024] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d3c4dd-df40-4b62-b61a-4927cab4be1e tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance '78b58db9-0616-428d-999c-2f6548008466' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1205.922675] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1205.923017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3f99305-f9b3-4cdd-92b4-69268d7c9f34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.931257] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1205.931257] env[68279]: value = "task-2964005" [ 1205.931257] env[68279]: _type = "Task" [ 1205.931257] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.943704] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964005, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.953176] env[68279]: DEBUG nova.objects.base [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1205.953468] env[68279]: DEBUG nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1206.018617] env[68279]: DEBUG nova.policy [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1206.441288] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964005, 'name': PowerOffVM_Task, 'duration_secs': 0.213783} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.441567] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1206.442280] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.442535] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee2c6ef9-c068-4e7c-95a2-925c6a86df12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.448503] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1206.448503] env[68279]: value = "task-2964006" [ 1206.448503] env[68279]: _type = "Task" [ 1206.448503] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.455697] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.959678] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1206.960093] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1206.960157] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594751', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'name': 'volume-47c73841-df53-45da-9226-b82f51a4c434', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5827dda0-48a4-4779-b6d2-7fbf73837583', 'attached_at': '', 'detached_at': '', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'serial': '47c73841-df53-45da-9226-b82f51a4c434'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1206.960942] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f1b97b-64bd-484a-b762-19e90a46dfa7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.979419] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837aa87f-caf4-4645-8874-33cce35f4d35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.986121] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe7bd68-d899-4875-82e1-50fd8d8588d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.005705] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b308c2e-bb9b-4150-acf7-3d72a90e3655 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.029107] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-47c73841-df53-45da-9226-b82f51a4c434/volume-47c73841-df53-45da-9226-b82f51a4c434.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1207.039046] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Reconfiguring VM instance instance-00000074 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1207.039410] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a131ddc0-2431-4d60-920f-05b2ac4cc722 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.068247] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1207.068247] env[68279]: value = "task-2964007" [ 1207.068247] env[68279]: _type = "Task" [ 1207.068247] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.079780] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964007, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.389450] env[68279]: DEBUG nova.compute.manager [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-plugged-ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.389673] env[68279]: DEBUG oslo_concurrency.lockutils [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.389879] env[68279]: DEBUG oslo_concurrency.lockutils [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.390060] env[68279]: DEBUG oslo_concurrency.lockutils [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.390223] env[68279]: DEBUG nova.compute.manager [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] No waiting events found dispatching 
network-vif-plugged-ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1207.390385] env[68279]: WARNING nova.compute.manager [req-aca42bc1-dc72-49c7-b3f2-b818770c11fb req-bb500bcc-cbdf-4eec-9181-fe7a5c865468 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received unexpected event network-vif-plugged-ef908c81-5301-4ed1-91cd-045d037f3909 for instance with vm_state active and task_state None. [ 1207.469712] env[68279]: DEBUG nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Successfully updated port: ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.486784] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.487133] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.579050] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964007, 'name': ReconfigVM_Task, 'duration_secs': 0.154903} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.579385] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Reconfigured VM instance instance-00000074 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1207.584459] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-053a71d3-2491-489d-9036-beac36f552a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.599585] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1207.599585] env[68279]: value = "task-2964008" [ 1207.599585] env[68279]: _type = "Task" [ 1207.599585] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.607598] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964008, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.972887] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.973176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.973437] env[68279]: DEBUG nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.990488] env[68279]: DEBUG nova.compute.utils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1208.109772] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.493107] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.512687] env[68279]: WARNING nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. ignoring it [ 1208.512887] env[68279]: WARNING nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. 
ignoring it [ 1208.610686] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964008, 'name': ReconfigVM_Task, 'duration_secs': 0.890706} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.610988] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594751', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'name': 'volume-47c73841-df53-45da-9226-b82f51a4c434', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5827dda0-48a4-4779-b6d2-7fbf73837583', 'attached_at': '', 'detached_at': '', 'volume_id': '47c73841-df53-45da-9226-b82f51a4c434', 'serial': '47c73841-df53-45da-9226-b82f51a4c434'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1208.611267] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.612069] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74507734-1757-42ab-9de6-9425fa0ac06d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.620995] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1208.621232] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ce3b79-e918-40d7-9f64-b7c7115d0ac9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.712062] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1208.712296] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1208.712477] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Deleting the 
datastore file [datastore2] 5827dda0-48a4-4779-b6d2-7fbf73837583 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1208.712736] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52b7fd00-af69-403b-aad4-3b676f0babbf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.720342] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for the task: (returnval){ [ 1208.720342] env[68279]: value = "task-2964010" [ 1208.720342] env[68279]: _type = "Task" [ 1208.720342] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.727619] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.873213] env[68279]: DEBUG nova.network.neutron [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8fca9b89-a21f-432d-acfb-74c144eba872", "address": "fa:16:3e:2f:e2:1e", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fca9b89-a2", "ovs_interfaceid": "8fca9b89-a21f-432d-acfb-74c144eba872", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ef908c81-5301-4ed1-91cd-045d037f3909", "address": "fa:16:3e:21:58:29", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef908c81-53", "ovs_interfaceid": "ef908c81-5301-4ed1-91cd-045d037f3909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.923849] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.924042] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.924218] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.924370] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.231322] env[68279]: DEBUG oslo_vmware.api [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Task: {'id': task-2964010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083729} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.231641] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.231792] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1209.232058] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1209.282302] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1209.282634] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84c6112a-81e1-4ad6-bca1-d1ea36546180 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.292241] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0deb6cd-eeff-4498-a193-9d3334342fc6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.328940] env[68279]: ERROR nova.compute.manager [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Failed to detach volume 47c73841-df53-45da-9226-b82f51a4c434 from /dev/sda: nova.exception.InstanceNotFound: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found. 
[ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Traceback (most recent call last): [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self.driver.rebuild(**kwargs) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise NotImplementedError() [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] NotImplementedError [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] During handling of the above exception, another exception occurred: [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Traceback (most recent call last): [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self.driver.detach_volume(context, old_connection_info, [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] return self._volumeops.detach_volume(connection_info, instance) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._detach_volume_vmdk(connection_info, instance) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] stable_ref.fetch_moref(session) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] 
nova.exception.InstanceNotFound: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found. [ 1209.328940] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.376031] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.378488] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.378488] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.378488] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be11b98-88f6-4c70-948a-79a9e7e47df1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.397421] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1209.397638] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.397796] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1209.398217] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.398217] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1209.398384] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1209.398668] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1209.398746] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1209.398908] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1209.399089] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1209.399267] env[68279]: DEBUG nova.virt.hardware [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1209.405525] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfiguring VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1209.408237] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44f6eb54-befb-4c2a-977b-d45db8f1ff90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.422175] env[68279]: DEBUG nova.compute.manager [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-changed-ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.422320] env[68279]: DEBUG nova.compute.manager [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing instance 
network info cache due to event network-changed-ef908c81-5301-4ed1-91cd-045d037f3909. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1209.423021] env[68279]: DEBUG oslo_concurrency.lockutils [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.423021] env[68279]: DEBUG oslo_concurrency.lockutils [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.423021] env[68279]: DEBUG nova.network.neutron [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Refreshing network info cache for port ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.428148] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Getting list of instances from cluster (obj){ [ 1209.428148] env[68279]: value = "domain-c8" [ 1209.428148] env[68279]: _type = "ClusterComputeResource" [ 1209.428148] env[68279]: } {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1209.430092] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d75635-962a-4724-b1fe-2c492b37e861 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.435404] env[68279]: DEBUG oslo_vmware.api [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1209.435404] env[68279]: value = "task-2964011" [ 1209.435404] env[68279]: _type = "Task" [ 1209.435404] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.449018] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Got total of 6 instances {{(pid=68279) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1209.449220] env[68279]: WARNING nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] While synchronizing instance power states, found 7 instances in the database and 6 instances on the hypervisor. 
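The WARNING just above ("found 7 instances in the database and 6 instances on the hypervisor") is the periodic _sync_power_states task reconciling Nova's database view of instances against what the hypervisor driver reports, then triggering a per-UUID sync for each database record. A minimal sketch of that reconciliation pattern follows; it is not Nova's actual code, and the `db_instances` / `driver_uuids` inputs are hypothetical stand-ins for the real database query and the `list_instances` call seen in the log:

```python
# Simplified sketch of a power-state sync pass (not Nova's actual implementation).
# db_instances / driver_uuids are hypothetical stand-ins for the DB query and
# the hypervisor listing that produce the counts logged above.
import logging

LOG = logging.getLogger(__name__)


def sync_power_states(db_instances, driver_uuids):
    """Compare DB instances against hypervisor VMs and trigger a per-UUID sync."""
    num_db = len(db_instances)
    num_vm = len(driver_uuids)
    if num_db != num_vm:
        # Mirrors the WARNING in the log: the two views disagree.
        LOG.warning("While synchronizing instance power states, found %d "
                    "instances in the database and %d instances on the "
                    "hypervisor.", num_db, num_vm)
    for inst in db_instances:
        LOG.debug("Triggering sync for uuid %s", inst["uuid"])
        if inst.get("task_state") is not None:
            # An in-flight operation (e.g. rebuilding) owns the instance;
            # skip it rather than fight over its power state.
            LOG.info("During sync_power_state the instance has a pending "
                     "task (%s). Skip.", inst["task_state"])
            continue
        # ... query the driver for the actual power state and reconcile ...


# Example matching the counts in the log: 7 DB records, 6 VMs on the hypervisor.
sync_power_states(
    db_instances=[{"uuid": f"uuid-{i}", "task_state": None} for i in range(7)],
    driver_uuids=[f"uuid-{i}" for i in range(6)],
)
```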
[ 1209.449379] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.449565] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.449718] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 5a7e2125-3310-4fcb-a281-59b0a2c07f67 {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.449867] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.450022] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 5827dda0-48a4-4779-b6d2-7fbf73837583 {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.450172] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 79905489-388d-4540-bdff-1c6a02f8bebd {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.450314] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Triggering sync for uuid 78b58db9-0616-428d-999c-2f6548008466 {{(pid=68279) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1209.450933] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.451168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.451422] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.451605] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.451861] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.452060] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.452291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.452506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "5827dda0-48a4-4779-b6d2-7fbf73837583" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.452705] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.452879] env[68279]: INFO nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] During sync_power_state the instance has a pending task (rebuilding). Skip. 
[ 1209.453049] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.453237] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.453437] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.453626] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.453833] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "78b58db9-0616-428d-999c-2f6548008466" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.454063] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.454230] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1209.462231] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0cabb1-e306-47c1-9881-504993b18a79 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.465058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865b8a4a-536f-45b8-8851-7256a1ff908a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.467948] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2807c7a-3e6c-4f73-8359-3a8b87fb185c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.470563] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebb9f68-1894-4ea4-8a8e-da28d6f55e8b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.473357] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1d492c-5cf6-4dfe-994a-bdca4c4f7a1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.475537] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.479292] env[68279]: DEBUG oslo_vmware.api [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.518136] env[68279]: DEBUG nova.compute.utils [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Build of instance 5827dda0-48a4-4779-b6d2-7fbf73837583 aborted: Failed to rebuild volume backed instance. {{(pid=68279) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1209.521372] env[68279]: ERROR nova.compute.manager [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 5827dda0-48a4-4779-b6d2-7fbf73837583 aborted: Failed to rebuild volume backed instance. 
[ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Traceback (most recent call last): [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self.driver.rebuild(**kwargs) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise NotImplementedError() [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] NotImplementedError [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] During handling of the above exception, another exception occurred: [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Traceback (most recent call last): [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._detach_root_volume(context, instance, root_bdm) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] with excutils.save_and_reraise_exception(): [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self.force_reraise() [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise self.value [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self.driver.detach_volume(context, old_connection_info, [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] return self._volumeops.detach_volume(connection_info, instance) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._detach_volume_vmdk(connection_info, instance) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] stable_ref.fetch_moref(session) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] nova.exception.InstanceNotFound: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found. [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] During handling of the above exception, another exception occurred: [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Traceback (most recent call last): [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] yield [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1209.521372] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._do_rebuild_instance_with_claim( [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._do_rebuild_instance( [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] self._rebuild_default_impl(**kwargs) [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] 
self._rebuild_volume_backed_instance( [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] raise exception.BuildAbortException( [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] nova.exception.BuildAbortException: Build of instance 5827dda0-48a4-4779-b6d2-7fbf73837583 aborted: Failed to rebuild volume backed instance. [ 1209.523032] env[68279]: ERROR nova.compute.manager [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] [ 1209.552994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.809017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.948452] env[68279]: DEBUG oslo_vmware.api [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964011, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.978954] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.979213] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.979431] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.979525] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1209.980425] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28c2baf-6744-40cc-b0f6-9c51f025ef0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.984694] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.988726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd02bad0-0025-4b8d-a709-1de4eced800c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.992795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.993122] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.993428] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "78b58db9-0616-428d-999c-2f6548008466" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.994098] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.185s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.994285] env[68279]: DEBUG nova.compute.manager [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Going to confirm migration 6 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1209.995787] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.006295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.453s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.006523] env[68279]: INFO nova.compute.manager [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Attaching volume 09238659-f0ec-4555-83f7-7cb88fd84022 to /dev/sdb [ 1210.008682] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9d45c3-7e70-413d-87a3-f01afabd9ab6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.017511] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0a826a-2adb-4a96-a918-b1d3201a41f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.051287] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180025MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1210.051433] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.051663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.056666] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86551ebb-63e8-499b-9129-9923202925e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.063625] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d43392e-0cd6-4a50-96c7-09cbc7649939 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.759566] env[68279]: DEBUG oslo_vmware.api [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964011, 'name': ReconfigVM_Task, 'duration_secs': 0.568939} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.765259] env[68279]: DEBUG nova.virt.block_device [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updating existing volume attachment record: f362af07-43ce-4350-9064-37a14cbcdb22 {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1210.780111] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.780352] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfigured VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1210.782762] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.330s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.786236] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4093cf1c-97c1-48bd-b581-2ebe8be21672 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.820799] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.821022] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 
tempest-ServerActionsTestOtherA-1729354294-project-member] Acquired lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.821209] env[68279]: DEBUG nova.network.neutron [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1210.821387] env[68279]: DEBUG nova.objects.instance [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'info_cache' on Instance uuid 78b58db9-0616-428d-999c-2f6548008466 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.848468] env[68279]: DEBUG nova.network.neutron [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updated VIF entry in instance network info cache for port ef908c81-5301-4ed1-91cd-045d037f3909. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.848948] env[68279]: DEBUG nova.network.neutron [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8fca9b89-a21f-432d-acfb-74c144eba872", "address": "fa:16:3e:2f:e2:1e", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fca9b89-a2", "ovs_interfaceid": "8fca9b89-a21f-432d-acfb-74c144eba872", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ef908c81-5301-4ed1-91cd-045d037f3909", "address": "fa:16:3e:21:58:29", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef908c81-53", "ovs_interfaceid": "ef908c81-5301-4ed1-91cd-045d037f3909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.254209] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Applying migration context for instance 78b58db9-0616-428d-999c-2f6548008466 as it has an incoming, in-progress migration 0c9a561b-4ed1-436e-935b-95e2a146d961. Migration status is confirming {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1211.255323] env[68279]: INFO nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating resource usage from migration 0c9a561b-4ed1-436e-935b-95e2a146d961 [ 1211.275903] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276149] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276287] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 5a7e2125-3310-4fcb-a281-59b0a2c07f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276404] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c07d8d3c-2af3-47b7-87cb-980c7dd0204d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276519] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 79905489-388d-4540-bdff-1c6a02f8bebd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276663] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Migration 0c9a561b-4ed1-436e-935b-95e2a146d961 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1211.276794] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 78b58db9-0616-428d-999c-2f6548008466 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1211.276986] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1211.277136] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1211.287051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-28ccc690-6e7b-4b71-be78-65563172f71c tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-ef908c81-5301-4ed1-91cd-045d037f3909" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.458s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.294971] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.512s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.351256] env[68279]: DEBUG oslo_concurrency.lockutils [req-2584c902-49ed-49fa-8fdd-2dedddabdbda req-6a2f4975-3319-4093-a3bc-d1672ba982de service nova] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.365090] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd58e4ad-bc5c-47ca-94b8-4b454fa346bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.372462] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbde57d5-cc97-4ab1-9b74-320a0d0b3345 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.403619] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9249bc3-ea32-4b11-85cf-b88f136ceabe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.411149] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26e8d64-2b38-407c-bc2a-c4c76178a94f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.424348] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.754996] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.926914] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1212.142523] env[68279]: DEBUG nova.network.neutron [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [{"id": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "address": "fa:16:3e:08:21:c7", "network": {"id": "c960accf-313b-43ad-be0d-0cea858041ee", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1708399090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"bd1384256d224e80bf6f25b9fd054376", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bdf1f33-fe", "ovs_interfaceid": "5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.186258] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "5827dda0-48a4-4779-b6d2-7fbf73837583" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.186612] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.186882] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.187123] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.187303] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.189514] env[68279]: INFO nova.compute.manager [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Terminating instance [ 1212.432681] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1212.432884] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.381s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.433013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.678s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.538922] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e42209-cf23-4f96-9920-e6772554af4b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.547447] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e61862d-f1f0-4524-b213-bef68bc4a435 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.581334] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff053644-7cef-4dac-8a1b-d279e6d2ca2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.589894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7081d9b-410c-41ab-bac2-abb7f3b2bdaf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.604647] env[68279]: DEBUG nova.compute.provider_tree [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.645639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Releasing lock "refresh_cache-78b58db9-0616-428d-999c-2f6548008466" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.645918] env[68279]: DEBUG nova.objects.instance [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'migration_context' on Instance uuid 78b58db9-0616-428d-999c-2f6548008466 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.695541] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-8fca9b89-a21f-432d-acfb-74c144eba872" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.695818] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-8fca9b89-a21f-432d-acfb-74c144eba872" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.697952] env[68279]: DEBUG nova.compute.manager [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1212.698531] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04dc679a-5e63-49e4-ac00-2bf85d64b039 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.708694] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7497cd36-3b7b-48a3-8bd0-2af9d61baa7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.740284] env[68279]: WARNING nova.virt.vmwareapi.driver [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found. [ 1212.740510] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.740837] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86960eae-0370-46ff-8af4-ef781db2c6ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.749470] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd32450-db1d-42f5-8ac7-644a8374a5d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.778278] env[68279]: WARNING nova.virt.vmwareapi.vmops [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found.
[ 1212.778491] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1212.778659] env[68279]: INFO nova.compute.manager [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1212.778924] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1212.779198] env[68279]: DEBUG nova.compute.manager [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1212.779297] env[68279]: DEBUG nova.network.neutron [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1213.108368] env[68279]: DEBUG nova.scheduler.client.report [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.148669] env[68279]: DEBUG nova.objects.base [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Object Instance<78b58db9-0616-428d-999c-2f6548008466> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1213.149705] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f0deb7-4d36-41f0-a4a7-587a22a13230 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.172693] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26038018-0c03-421d-89aa-3019c71e5d12 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.178767] env[68279]: DEBUG oslo_vmware.api [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1213.178767] env[68279]: value = 
"session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3dd4b-9894-c95a-5948-cdf401fa31fa" [ 1213.178767] env[68279]: _type = "Task" [ 1213.178767] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.187242] env[68279]: DEBUG oslo_vmware.api [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3dd4b-9894-c95a-5948-cdf401fa31fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.200159] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.200350] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.201453] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc0678f-5791-4395-bb10-72034d6b660b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.220879] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e44f262-9ee5-4c57-8362-5765f3eebe49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.253467] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfiguring VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1213.253817] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a3a6957-c9fe-498d-8ab1-01b001e10f3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.272815] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1213.272815] env[68279]: value = "task-2964014" [ 1213.272815] env[68279]: _type = "Task" [ 1213.272815] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.284258] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.600607] env[68279]: DEBUG nova.compute.manager [req-2b8519f3-2eb3-41f9-89f0-1600de7ae75c req-dfeeae35-43d3-49b6-b9a0-1b9b3ead3540 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Received event network-vif-deleted-6a8f6f79-2136-4be2-a2a5-cb6b32370b13 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1213.600830] env[68279]: INFO nova.compute.manager [req-2b8519f3-2eb3-41f9-89f0-1600de7ae75c req-dfeeae35-43d3-49b6-b9a0-1b9b3ead3540 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Neutron deleted interface 6a8f6f79-2136-4be2-a2a5-cb6b32370b13; detaching it from the instance and deleting it from the info cache [ 1213.601007] env[68279]: DEBUG nova.network.neutron [req-2b8519f3-2eb3-41f9-89f0-1600de7ae75c req-dfeeae35-43d3-49b6-b9a0-1b9b3ead3540 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.614231] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.181s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.614446] env[68279]: INFO nova.compute.manager [None req-6b6a0d77-0242-4a7c-846e-b10c3389de04 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Successfully reverted task state from rebuilding on failure for instance. [ 1213.692203] env[68279]: DEBUG oslo_vmware.api [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3dd4b-9894-c95a-5948-cdf401fa31fa, 'name': SearchDatastore_Task, 'duration_secs': 0.008468} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.692203] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.692203] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.783180] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.976199] env[68279]: DEBUG nova.network.neutron [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.103942] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aaeb3d1e-e709-4be3-bc3a-49e1dc6f927c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.114862] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28537ee8-9954-4e28-987b-931dc6e2d7f0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.145703] env[68279]: DEBUG nova.compute.manager [req-2b8519f3-2eb3-41f9-89f0-1600de7ae75c req-dfeeae35-43d3-49b6-b9a0-1b9b3ead3540 service nova] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Detach interface failed, port_id=6a8f6f79-2136-4be2-a2a5-cb6b32370b13, reason: Instance 5827dda0-48a4-4779-b6d2-7fbf73837583 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1214.286961] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.318299] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db003c13-ce15-443d-a1b5-ef3a88a37797 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.326576] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94ee518-0be0-4af4-8fc8-245a63a5c805 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.357770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77369653-3b6a-4fe0-a794-0571622f4fce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.364871] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4f8518-c4ef-44f6-8996-e56ac422ecff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.377994] env[68279]: DEBUG nova.compute.provider_tree [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.479789] env[68279]: INFO nova.compute.manager [-] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Took 1.70 seconds to deallocate network for instance. 
[ 1214.783856] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.882046] env[68279]: DEBUG nova.scheduler.client.report [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.027750] env[68279]: INFO nova.compute.manager [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Took 0.55 seconds to detach 1 volumes for instance. [ 1215.030974] env[68279]: DEBUG nova.compute.manager [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Deleting volume: 47c73841-df53-45da-9226-b82f51a4c434 {{(pid=68279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1215.287137] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.313941] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1215.314213] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594764', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'name': 'volume-09238659-f0ec-4555-83f7-7cb88fd84022', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '79905489-388d-4540-bdff-1c6a02f8bebd', 'attached_at': '', 'detached_at': '', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'serial': '09238659-f0ec-4555-83f7-7cb88fd84022'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1215.315064] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304cac52-563f-4b55-8c37-66a3db5718e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.330703] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242a4f98-8f51-4077-9738-7d2c2c2fd923 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.354011] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-09238659-f0ec-4555-83f7-7cb88fd84022/volume-09238659-f0ec-4555-83f7-7cb88fd84022.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1215.354250] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8f3f9c4-a79a-4f80-9897-351636d44559 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.370235] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1215.370235] env[68279]: value = "task-2964016" [ 1215.370235] env[68279]: _type = "Task" [ 1215.370235] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.377719] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964016, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.571972] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.680405] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.680619] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.680785] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.784845] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.880981] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964016, 'name': ReconfigVM_Task, 'duration_secs': 0.323969} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.881281] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-09238659-f0ec-4555-83f7-7cb88fd84022/volume-09238659-f0ec-4555-83f7-7cb88fd84022.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1215.885859] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abbfef82-7903-43b1-8eec-51d2ff182194 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.896069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.204s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.898663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.327s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.898876] env[68279]: DEBUG nova.objects.instance [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lazy-loading 'resources' on Instance uuid 5827dda0-48a4-4779-b6d2-7fbf73837583 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.904255] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1215.904255] env[68279]: value = "task-2964017" [ 1215.904255] env[68279]: _type = "Task" [ 1215.904255] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.912499] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.230849] env[68279]: INFO nova.compute.manager [None req-2d81f683-ab64-445d-9f37-ace71d380d10 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Get console output [ 1216.231218] env[68279]: WARNING nova.virt.vmwareapi.driver [None req-2d81f683-ab64-445d-9f37-ace71d380d10 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] The console log is missing. 
Check your VSPC configuration [ 1216.286157] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.416166] env[68279]: DEBUG oslo_vmware.api [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964017, 'name': ReconfigVM_Task, 'duration_secs': 0.139121} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.416486] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594764', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'name': 'volume-09238659-f0ec-4555-83f7-7cb88fd84022', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '79905489-388d-4540-bdff-1c6a02f8bebd', 'attached_at': '', 'detached_at': '', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'serial': '09238659-f0ec-4555-83f7-7cb88fd84022'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1216.454500] env[68279]: INFO nova.scheduler.client.report [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocation for migration 0c9a561b-4ed1-436e-935b-95e2a146d961 [ 1216.487923] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eece9c0-3d02-406a-8a6c-454b57fc3b1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.496621] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bee6b6b-d721-4a20-8859-1993383b6f16 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.526744] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7701ac54-c2e8-4e6d-a768-10895b38fca0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.533475] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e481ce-9e09-43de-9f6c-1d0d1b2cdc28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.547648] env[68279]: DEBUG nova.compute.provider_tree [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.786159] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 
tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.959069] env[68279]: DEBUG oslo_concurrency.lockutils [None req-27898a95-fa88-4f21-8a25-309a2292a207 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.965s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.050387] env[68279]: DEBUG nova.scheduler.client.report [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.287311] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.454255] env[68279]: DEBUG nova.objects.instance [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'flavor' on Instance uuid 79905489-388d-4540-bdff-1c6a02f8bebd {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.554759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.656s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.786805] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.959867] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3401d9b7-2a3b-41db-9dc2-34bf2fa67d72 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.953s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.035447] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.035740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.074181] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8d92c237-b24d-472a-b4e6-2ba5a61c3c69 tempest-ServerActionsV293TestJSON-2083657082 tempest-ServerActionsV293TestJSON-2083657082-project-member] Lock "5827dda0-48a4-4779-b6d2-7fbf73837583" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.888s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.288262] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.539696] env[68279]: INFO nova.compute.manager [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Detaching volume 09238659-f0ec-4555-83f7-7cb88fd84022 [ 1218.573978] env[68279]: INFO nova.virt.block_device [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Attempting to driver detach volume 09238659-f0ec-4555-83f7-7cb88fd84022 from mountpoint /dev/sdb [ 1218.574246] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Volume detach. 
Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.574445] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594764', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'name': 'volume-09238659-f0ec-4555-83f7-7cb88fd84022', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '79905489-388d-4540-bdff-1c6a02f8bebd', 'attached_at': '', 'detached_at': '', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'serial': '09238659-f0ec-4555-83f7-7cb88fd84022'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.575336] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bc6fd4-3b6d-4522-89ef-c18d1e2475a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.597837] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb985de7-d956-43f1-8434-ef4912647ad3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.604907] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671f140c-4655-4c1a-b458-3d6774d99091 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.625335] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a805e761-ae33-4932-8388-16bbc6a4dd79 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.639840] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] The volume has not been displaced from its original location: [datastore1] volume-09238659-f0ec-4555-83f7-7cb88fd84022/volume-09238659-f0ec-4555-83f7-7cb88fd84022.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1218.645019] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1218.645301] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-438a77b0-44d7-42a8-aa3e-c47506508415 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.662587] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1218.662587] env[68279]: value = "task-2964018" [ 1218.662587] env[68279]: _type = "Task" [ 1218.662587] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.670535] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964018, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.788135] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.172939] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964018, 'name': ReconfigVM_Task, 'duration_secs': 0.220757} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.172939] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1219.177476] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cbba542-91ac-4b3d-877c-5d8cc25a16eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.192146] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1219.192146] env[68279]: value = "task-2964019" [ 1219.192146] env[68279]: _type = "Task" [ 1219.192146] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.199263] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964019, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.289062] env[68279]: DEBUG oslo_vmware.api [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964014, 'name': ReconfigVM_Task, 'duration_secs': 5.735509} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.289367] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.289624] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Reconfigured VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1219.519132] env[68279]: DEBUG nova.compute.manager [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-deleted-8fca9b89-a21f-432d-acfb-74c144eba872 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1219.519395] env[68279]: INFO nova.compute.manager [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Neutron deleted interface 8fca9b89-a21f-432d-acfb-74c144eba872; detaching it from the instance and deleting it from the info cache [ 1219.519631] env[68279]: DEBUG nova.network.neutron [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ef908c81-5301-4ed1-91cd-045d037f3909", "address": "fa:16:3e:21:58:29", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef908c81-53", "ovs_interfaceid": "ef908c81-5301-4ed1-91cd-045d037f3909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.703577] env[68279]: DEBUG oslo_vmware.api [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964019, 'name': ReconfigVM_Task, 'duration_secs': 0.13156} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.704022] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594764', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'name': 'volume-09238659-f0ec-4555-83f7-7cb88fd84022', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '79905489-388d-4540-bdff-1c6a02f8bebd', 'attached_at': '', 'detached_at': '', 'volume_id': '09238659-f0ec-4555-83f7-7cb88fd84022', 'serial': '09238659-f0ec-4555-83f7-7cb88fd84022'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1220.022180] env[68279]: DEBUG oslo_concurrency.lockutils [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.022400] env[68279]: DEBUG oslo_concurrency.lockutils [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] Acquired lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.023299] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33ec6f6-e478-4c13-b36b-4d7dc89379ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.042918] env[68279]: DEBUG oslo_concurrency.lockutils [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] Releasing lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.043074] env[68279]: WARNING nova.compute.manager [req-d3c52cfa-37d2-4a4d-a536-e475ebec2149 req-957d7d8c-367a-46f8-bb03-9fc3321afb4a service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Detach interface failed, port_id=8fca9b89-a21f-432d-acfb-74c144eba872, reason: No device with interface-id 8fca9b89-a21f-432d-acfb-74c144eba872 exists on VM: nova.exception.NotFound: No device with interface-id 8fca9b89-a21f-432d-acfb-74c144eba872 exists on VM [ 1220.246695] env[68279]: DEBUG nova.objects.instance [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'flavor' on Instance uuid 79905489-388d-4540-bdff-1c6a02f8bebd {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.503640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.503862] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 
tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.504102] env[68279]: DEBUG nova.network.neutron [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1220.546991] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.547242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.547448] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.547628] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.547795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.549721] env[68279]: INFO nova.compute.manager [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Terminating instance [ 1221.053349] env[68279]: DEBUG nova.compute.manager [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Start destroying the instance on the 
hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.053606] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.054495] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf630367-4128-4134-987c-dd796e0945da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.062501] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.062501] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-527a577a-c496-4f71-a48a-fada67cb67ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.068501] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1221.068501] env[68279]: value = "task-2964020" [ 1221.068501] env[68279]: _type = "Task" [ 1221.068501] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.082491] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.254945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-ed417f8d-09a8-4da9-b1ea-818ad91bb3e6 tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.219s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.267050] env[68279]: INFO nova.network.neutron [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Port ef908c81-5301-4ed1-91cd-045d037f3909 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
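The PowerOffVM_Task entries above show the usual oslo.vmware shape: the driver invokes an asynchronous vCenter task and then polls it (the repeated "progress is 0%" debug lines) until it reports success or failure. The following is a minimal sketch of that poll-until-done loop, not the oslo.vmware implementation itself; the fetch_task_state callable, the state names, and the poll interval are illustrative assumptions.

import time

# Illustrative task states; vSphere tasks move through queued/running
# before ending in success or error.
RUNNING_STATES = {"queued", "running"}

def wait_for_task(fetch_task_state, interval=0.5, timeout=300):
    """Poll an asynchronous task until it leaves the running states.

    fetch_task_state is assumed to return a (state, progress, error)
    tuple for the task being tracked, e.g. task-2964020 in the log above.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_state()
        if state in RUNNING_STATES:
            # Mirrors the per-poll "progress is N%" debug entries.
            print("task progress is %s%%" % progress)
            time.sleep(interval)
            continue
        if state == "success":
            return
        raise RuntimeError("task failed: %s" % error)
    raise TimeoutError("task did not complete within %ss" % timeout)

In the log, the same pattern repeats for UnregisterVM, DeleteDatastoreFile_Task and later CreateVM_Task: issue the call, then poll the returned task object until it completes.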
[ 1221.268029] env[68279]: DEBUG nova.network.neutron [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [{"id": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "address": "fa:16:3e:4e:28:72", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ccbd4c-94", "ovs_interfaceid": "43ccbd4c-94a9-45cd-86db-6ab1445ebaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.544379] env[68279]: DEBUG nova.compute.manager [req-4eda9b3f-0292-4f8e-b34d-3138d81752eb req-368f8c95-f191-43e3-b9ba-3d9f59ea66f9 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-deleted-ef908c81-5301-4ed1-91cd-045d037f3909 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1221.578338] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964020, 'name': PowerOffVM_Task, 'duration_secs': 0.200625} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.578556] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.579673] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.579927] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ba93ad9-42a7-4a09-86e0-9bbac59a2c61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.679206] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.679507] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.679702] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleting the datastore file [datastore1] c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.679970] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67709300-fb5a-4577-8fe6-8f08cafd37c1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.686594] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1221.686594] env[68279]: value = "task-2964022" [ 1221.686594] env[68279]: _type = "Task" [ 1221.686594] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.696448] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.771168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-c07d8d3c-2af3-47b7-87cb-980c7dd0204d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.196905] env[68279]: DEBUG oslo_vmware.api [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171395} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.197275] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.197392] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.197523] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.197691] env[68279]: INFO nova.compute.manager [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1222.197928] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.198139] env[68279]: DEBUG nova.compute.manager [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1222.198234] env[68279]: DEBUG nova.network.neutron [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1222.240433] env[68279]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port ef908c81-5301-4ed1-91cd-045d037f3909 could not be found.", "detail": ""}} {{(pid=68279) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1222.240663] env[68279]: DEBUG nova.network.neutron [-] Unable to show port ef908c81-5301-4ed1-91cd-045d037f3909 as it no longer exists. {{(pid=68279) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1222.276390] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0f0e48a-8963-4ebb-b859-09aeb04a3259 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-c07d8d3c-2af3-47b7-87cb-980c7dd0204d-8fca9b89-a21f-432d-acfb-74c144eba872" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.580s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.297140] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.297140] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.297140] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.297140] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.297140] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.299299] env[68279]: INFO nova.compute.manager [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Terminating instance [ 1222.432362] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.432547] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.803442] env[68279]: DEBUG nova.compute.manager [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1222.803627] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1222.804571] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8ae1a7-6987-4b6d-8082-7d912c3c4c84 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.811809] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.812072] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ed92174-c662-4e8a-983b-927aa330f19b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.818424] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1222.818424] env[68279]: value = "task-2964023" [ 1222.818424] env[68279]: _type = "Task" [ 1222.818424] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.825731] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.936018] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1223.144376] env[68279]: DEBUG nova.network.neutron [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.329373] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964023, 'name': PowerOffVM_Task, 'duration_secs': 0.189066} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.329744] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1223.329783] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1223.330042] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-513eb170-8ad2-48bb-bd39-fb679cda64a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.394747] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1223.395064] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1223.395361] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleting the datastore file [datastore2] 79905489-388d-4540-bdff-1c6a02f8bebd {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.395634] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2684bf2-b9ca-4b87-8df2-8eea6fded524 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.401807] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for the task: (returnval){ [ 1223.401807] env[68279]: value = "task-2964025" [ 1223.401807] env[68279]: _type = "Task" [ 1223.401807] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.409572] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.458848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.459153] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.460736] env[68279]: INFO nova.compute.claims [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1223.576272] env[68279]: DEBUG nova.compute.manager [req-be3aa30e-932f-4549-9049-66e36654abcf req-95f94d88-f9a4-4163-bdb3-a563e367a065 service nova] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Received event network-vif-deleted-43ccbd4c-94a9-45cd-86db-6ab1445ebaf0 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1223.647247] env[68279]: INFO nova.compute.manager [-] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Took 1.45 seconds to deallocate network for instance. [ 1223.912440] env[68279]: DEBUG oslo_vmware.api [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Task: {'id': task-2964025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140518} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.912674] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.912805] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.912976] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.913169] env[68279]: INFO nova.compute.manager [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1223.913398] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.913586] env[68279]: DEBUG nova.compute.manager [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.913678] env[68279]: DEBUG nova.network.neutron [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1224.156283] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.564871] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ce5f26-66c0-4530-b9ab-cdf89521f178 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.572885] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0d6554-1b63-4230-be72-26d48e16bdee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.605782] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978af114-c58b-40bc-901e-4922854a4eb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.614145] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2332cef-16ea-4215-861a-26999746d096 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.627972] env[68279]: DEBUG nova.compute.provider_tree [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.109832] env[68279]: DEBUG nova.network.neutron [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.149483] env[68279]: ERROR nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [req-a170df49-bb23-43a7-9e93-0b65aa144f04] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a170df49-bb23-43a7-9e93-0b65aa144f04"}]} [ 1225.167891] env[68279]: DEBUG nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1225.181844] env[68279]: DEBUG nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1225.182106] env[68279]: DEBUG nova.compute.provider_tree [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.193738] env[68279]: DEBUG nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1225.212430] env[68279]: DEBUG nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1225.298275] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc473e25-f1af-43cd-9bc7-9909de2662b2 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.306551] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b073dbdb-1ad1-4dfa-b350-77b4d4a4f586 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.336070] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebf4062-e321-40db-8e1d-fb77800f94ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.342786] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a37a397-fe73-4450-a7b8-e186fbf36b55 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.355492] env[68279]: DEBUG nova.compute.provider_tree [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.601111] env[68279]: DEBUG nova.compute.manager [req-73c86b2a-57eb-4e80-896d-23bf93501de1 req-95e043dd-968d-41b2-b57a-09de719c1691 service nova] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Received event network-vif-deleted-abfe62ee-1d24-42d6-9928-6425596e2a97 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1225.612954] env[68279]: INFO nova.compute.manager [-] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Took 1.70 seconds to deallocate network for instance. 
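The 409 "placement.concurrent_update" error a few entries above is Placement's optimistic-concurrency check: the inventory update carried a stale resource provider generation, so the report client refreshes the provider's inventories, aggregates and traits and retries with the fresh generation (the "Refreshing inventories/aggregate associations/trait associations" lines). The sketch below shows that refresh-and-retry shape against the Placement REST API; it is not Nova's report client (that lives in nova.scheduler.client.report), and the session object is assumed to be a requests-style client already pointed at the Placement endpoint.

def set_inventory_with_retry(session, rp_uuid, inventories, max_retries=3):
    """Update a provider's inventory, retrying on generation conflicts.

    session is assumed to expose get()/put() returning requests-style
    responses against the Placement API, with relative paths allowed.
    """
    for _ in range(max_retries):
        # Refresh the provider's current generation before each attempt.
        current = session.get(
            "/resource_providers/%s/inventories" % rp_uuid).json()
        payload = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = session.put(
            "/resource_providers/%s/inventories" % rp_uuid, json=payload)
        if resp.status_code == 200:
            return resp.json()
        if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
            # Another writer bumped the generation first; refresh and retry,
            # as the log does before succeeding with generation 159 -> 160.
            continue
        resp.raise_for_status()
    raise RuntimeError(
        "could not update inventory for %s: generation kept moving" % rp_uuid)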
[ 1225.885305] env[68279]: DEBUG nova.scheduler.client.report [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1225.885305] env[68279]: DEBUG nova.compute.provider_tree [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 159 to 160 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1225.885305] env[68279]: DEBUG nova.compute.provider_tree [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1226.119259] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.389382] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.930s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.389913] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1226.392639] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.237s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.392857] env[68279]: DEBUG nova.objects.instance [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'resources' on Instance uuid c07d8d3c-2af3-47b7-87cb-980c7dd0204d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.896323] env[68279]: DEBUG nova.compute.utils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1226.900363] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Allocating IP information in the background. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1226.900527] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1226.944802] env[68279]: DEBUG nova.policy [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e9d9c1927948f5bb8f42235b09f008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d7a90a39b864e3e985b3b828c3fd363', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1226.994811] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897f4096-af98-4c72-ad74-216a2a33d52d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.002380] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88463b4f-d81f-493d-ba6a-525e6da0d15f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.035402] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dc9498-19cd-4419-acd4-0d1a9a4cd120 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.043406] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e070f494-766c-4d50-9574-2b017e6d32d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.056740] env[68279]: DEBUG nova.compute.provider_tree [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.267591] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Successfully created port: 0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1227.401567] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1227.562379] env[68279]: DEBUG nova.scheduler.client.report [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.066608] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.068987] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.950s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.069247] env[68279]: DEBUG nova.objects.instance [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lazy-loading 'resources' on Instance uuid 79905489-388d-4540-bdff-1c6a02f8bebd {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.088216] env[68279]: INFO nova.scheduler.client.report [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted allocations for instance c07d8d3c-2af3-47b7-87cb-980c7dd0204d [ 1228.411966] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1228.436016] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.436284] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.436442] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.436621] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.436776] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.436913] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.437129] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.437301] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.437468] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.437628] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.437797] env[68279]: DEBUG nova.virt.hardware [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.438669] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8c0424-ec8e-4c83-93a7-13254af14bab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.446525] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d425e31a-3f0c-4672-8c9f-374c0d11ca23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.595763] env[68279]: DEBUG oslo_concurrency.lockutils [None req-c6ad67f0-e80c-4a87-b480-8c7f37e7fbee tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "c07d8d3c-2af3-47b7-87cb-980c7dd0204d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.048s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.619393] env[68279]: DEBUG nova.compute.manager [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Received event network-vif-plugged-0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1228.619463] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] Acquiring lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.619628] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.619784] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.619975] env[68279]: DEBUG nova.compute.manager [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] No waiting events found dispatching network-vif-plugged-0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1228.620147] env[68279]: WARNING nova.compute.manager [req-7a270e65-832d-494b-b1ba-13d6ce546b94 req-b604420d-b42b-410d-af4b-329b62786aaf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Received unexpected event network-vif-plugged-0f9fd296-d86e-4b84-9e0b-1037b955ee7f for instance with vm_state building and task_state spawning. [ 1228.661296] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f91dd7-da02-484d-a52e-516eda42f754 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.668864] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f9409e-87b5-4a91-91c6-b4a6929dcb9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.700062] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be80a337-d095-44ed-b5db-c1ecba01329f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.706966] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5039f3b-2b5a-453f-92ae-2942cf39d7ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.720024] env[68279]: DEBUG nova.compute.provider_tree [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.721639] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Successfully updated port: 0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.225500] env[68279]: DEBUG nova.scheduler.client.report [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.228906] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.228994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.229100] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.731835] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.663s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.761304] env[68279]: INFO nova.scheduler.client.report [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Deleted allocations for instance 79905489-388d-4540-bdff-1c6a02f8bebd [ 1229.776762] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1229.967455] env[68279]: DEBUG nova.network.neutron [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updating instance_info_cache with network_info: [{"id": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "address": "fa:16:3e:01:46:6d", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9fd296-d8", "ovs_interfaceid": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.269177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-74a18582-b087-4143-b669-ad3ed8bda40e tempest-AttachVolumeNegativeTest-1225197835 tempest-AttachVolumeNegativeTest-1225197835-project-member] Lock "79905489-388d-4540-bdff-1c6a02f8bebd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.972s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.469705] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.470068] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance network_info: |[{"id": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "address": "fa:16:3e:01:46:6d", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9fd296-d8", "ovs_interfaceid": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1230.470482] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:46:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f9fd296-d86e-4b84-9e0b-1037b955ee7f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1230.478238] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1230.478469] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1230.479103] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce092363-5ddb-4a4b-b475-d28789eb36f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.499475] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1230.499475] env[68279]: value = "task-2964026" [ 1230.499475] env[68279]: _type = "Task" [ 1230.499475] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.507187] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964026, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.647640] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.647880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.649862] env[68279]: DEBUG nova.compute.manager [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Received event network-changed-0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1230.650053] env[68279]: DEBUG nova.compute.manager [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Refreshing instance network info cache due to event network-changed-0f9fd296-d86e-4b84-9e0b-1037b955ee7f. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1230.650254] env[68279]: DEBUG oslo_concurrency.lockutils [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] Acquiring lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.650396] env[68279]: DEBUG oslo_concurrency.lockutils [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] Acquired lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.650557] env[68279]: DEBUG nova.network.neutron [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Refreshing network info cache for port 0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.012733] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964026, 'name': CreateVM_Task, 'duration_secs': 0.291879} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.013031] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1231.013846] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.014090] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.014502] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1231.014817] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c58c44f-dae8-455a-aab7-7480fe63747e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.019963] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1231.019963] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527da4d9-16c0-9f31-9e03-f3e33d9275f4" [ 1231.019963] env[68279]: _type = "Task" [ 1231.019963] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.029050] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527da4d9-16c0-9f31-9e03-f3e33d9275f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.153043] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1231.347958] env[68279]: DEBUG nova.network.neutron [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updated VIF entry in instance network info cache for port 0f9fd296-d86e-4b84-9e0b-1037b955ee7f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1231.348379] env[68279]: DEBUG nova.network.neutron [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updating instance_info_cache with network_info: [{"id": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "address": "fa:16:3e:01:46:6d", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9fd296-d8", "ovs_interfaceid": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.532185] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527da4d9-16c0-9f31-9e03-f3e33d9275f4, 'name': SearchDatastore_Task, 'duration_secs': 0.011207} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.532463] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.532714] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1231.532977] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.533157] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.533352] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.533640] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a67d02c-178d-47cd-b168-18b812b3475e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.541889] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.542089] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1231.542805] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89372853-1537-4ef9-b044-5b0ca2f76d20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.547935] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1231.547935] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52690db6-7d60-ff73-5151-84fa5eca3546" [ 1231.547935] env[68279]: _type = "Task" [ 1231.547935] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.554948] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52690db6-7d60-ff73-5151-84fa5eca3546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.678955] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.679254] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.680829] env[68279]: INFO nova.compute.claims [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.851300] env[68279]: DEBUG oslo_concurrency.lockutils [req-58f825eb-8f65-4e24-b4aa-67d78daf7210 req-32513dd1-1bce-4b27-8252-8bbbcabb24bf service nova] Releasing lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.058246] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52690db6-7d60-ff73-5151-84fa5eca3546, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.059137] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2facc1ef-0305-4389-90fe-1676553dc3e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.063894] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1232.063894] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52379de6-ffce-7f57-1791-68674a3eb631" [ 1232.063894] env[68279]: _type = "Task" [ 1232.063894] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.071044] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52379de6-ffce-7f57-1791-68674a3eb631, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.576254] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52379de6-ffce-7f57-1791-68674a3eb631, 'name': SearchDatastore_Task, 'duration_secs': 0.011412} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.576582] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.576767] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1232.577045] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d3bb043-8a24-47d2-bacf-c261ddbc6c57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.583881] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1232.583881] env[68279]: value = "task-2964028" [ 1232.583881] env[68279]: _type = "Task" [ 1232.583881] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.591816] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.778349] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb73656-8e02-4ae9-ac2c-4fae009896fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.787011] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5caa4bf5-3bf9-4a99-af5b-c66ea9a62f5c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.821532] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67107a69-10c5-44c9-acdc-e1239ec075a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.830443] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e261d28-0602-4e74-b791-8b4718969fb7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.845552] env[68279]: DEBUG nova.compute.provider_tree [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.093110] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964028, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.350098] env[68279]: DEBUG nova.scheduler.client.report [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.593714] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513175} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.594131] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1233.594256] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1233.594430] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8b210cf-6e2a-4357-aedf-93296deba5c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.601073] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1233.601073] env[68279]: value = "task-2964030" [ 1233.601073] env[68279]: _type = "Task" [ 1233.601073] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.607918] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.854416] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.854993] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.110715] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065134} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.110977] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1234.111726] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac90ab3-b93a-43be-b58a-85507d5fe5c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.133011] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1234.133283] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f55c7d19-ae9f-48d6-93e0-7de5908fb8a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.152507] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1234.152507] env[68279]: value = "task-2964031" [ 1234.152507] env[68279]: _type = "Task" [ 1234.152507] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.161213] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964031, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.360152] env[68279]: DEBUG nova.compute.utils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1234.361665] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1234.361871] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1234.405041] env[68279]: DEBUG nova.policy [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1234.641719] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Successfully created port: e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.664025] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964031, 'name': ReconfigVM_Task, 'duration_secs': 0.286902} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.664025] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.664025] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34fdc8c0-0d59-413c-8bac-703121e89e20 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.670540] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1234.670540] env[68279]: value = "task-2964032" [ 1234.670540] env[68279]: _type = "Task" [ 1234.670540] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.677998] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964032, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.865595] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.180520] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964032, 'name': Rename_Task, 'duration_secs': 0.137064} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.180520] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1235.180722] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d09349bf-481c-4481-9194-28856ed5c0b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.187647] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1235.187647] env[68279]: value = "task-2964033" [ 1235.187647] env[68279]: _type = "Task" [ 1235.187647] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.194840] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.696986] env[68279]: DEBUG oslo_vmware.api [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964033, 'name': PowerOnVM_Task, 'duration_secs': 0.416272} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.698046] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1235.698046] env[68279]: INFO nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Took 7.29 seconds to spawn the instance on the hypervisor. 
[ 1235.698209] env[68279]: DEBUG nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1235.698930] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a945f8-588f-4a41-bec1-53dd55db7cc3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.875644] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1235.900279] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1235.900531] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1235.900688] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1235.900870] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1235.901023] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1235.901178] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1235.901378] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1235.901533] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1235.901696] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1235.901856] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1235.902041] env[68279]: DEBUG nova.virt.hardware [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1235.902877] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f466f78-e28a-467f-9f70-df29f9382eac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.910694] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca29b19-b98a-4590-8533-9ecc635f3b0e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.000615] env[68279]: DEBUG nova.compute.manager [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-vif-plugged-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.000855] env[68279]: DEBUG oslo_concurrency.lockutils [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.001170] env[68279]: DEBUG oslo_concurrency.lockutils [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.001287] env[68279]: DEBUG oslo_concurrency.lockutils [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.001468] env[68279]: DEBUG nova.compute.manager [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] No waiting events found dispatching network-vif-plugged-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1236.001664] env[68279]: WARNING nova.compute.manager [req-bb675d9e-e580-4b51-92a3-bcf337c93d08 req-03de5983-6d14-45b4-86a1-e960a83d0c16 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received unexpected event network-vif-plugged-e9c7f070-ce09-4503-a0e5-76cfe063bc35 for instance with vm_state building and task_state spawning. [ 1236.078571] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Successfully updated port: e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1236.218222] env[68279]: INFO nova.compute.manager [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Took 12.78 seconds to build instance. 
[ 1236.584958] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.584958] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.584958] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1236.721074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-f2d84ae4-f488-4c26-9d59-24ec59da5978 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.288s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.114957] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1237.291177] env[68279]: DEBUG nova.network.neutron [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.795122] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.795122] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Instance network_info: |[{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1237.795506] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:f2:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9c7f070-ce09-4503-a0e5-76cfe063bc35', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1237.802740] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1237.802976] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1237.803746] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-753a33ec-bbd7-4059-b990-4992da0b079c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.824678] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1237.824678] env[68279]: value = "task-2964034" [ 1237.824678] env[68279]: _type = "Task" [ 1237.824678] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.830668] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964034, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.039019] env[68279]: DEBUG nova.compute.manager [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.039357] env[68279]: DEBUG nova.compute.manager [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1238.039695] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.039947] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.040239] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1238.334695] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964034, 'name': CreateVM_Task, 'duration_secs': 0.328219} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.334869] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1238.335630] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.335860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.336262] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1238.336551] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d73596d-423d-4cd8-84a0-91fe412cabb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.342056] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1238.342056] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e449-1784-f55a-4b2c-9d3749510c3d" [ 1238.342056] env[68279]: _type = "Task" [ 1238.342056] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.350240] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e449-1784-f55a-4b2c-9d3749510c3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.736151] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1238.736561] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.852500] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d3e449-1784-f55a-4b2c-9d3749510c3d, 'name': SearchDatastore_Task, 'duration_secs': 0.009816} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.852856] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.852964] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1238.853212] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.853358] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.853535] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1238.853813] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a7d8bae-360f-4300-ae29-a5a783d76fec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.862432] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1238.862602] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1238.863290] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b39c131-f1b7-410e-b8fb-3f57e2ecad87 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.868304] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1238.868304] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52117a7a-cf3a-c734-0f31-a0a2292ee897" [ 1238.868304] env[68279]: _type = "Task" [ 1238.868304] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.875925] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52117a7a-cf3a-c734-0f31-a0a2292ee897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.239257] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.239556] env[68279]: DEBUG nova.compute.manager [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Received event network-changed-0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.239746] env[68279]: DEBUG nova.compute.manager [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Refreshing instance network info cache due to event network-changed-0f9fd296-d86e-4b84-9e0b-1037b955ee7f. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1239.239956] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Acquiring lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.240115] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Acquired lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.240280] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Refreshing network info cache for port 0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.378578] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52117a7a-cf3a-c734-0f31-a0a2292ee897, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.379320] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1e4878e-0d59-4346-8147-bcd33d8d51a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.383978] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1239.383978] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52255831-6ba8-6bcb-e9c1-defbcabb6b17" [ 1239.383978] env[68279]: _type = "Task" [ 1239.383978] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.392101] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52255831-6ba8-6bcb-e9c1-defbcabb6b17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.894654] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52255831-6ba8-6bcb-e9c1-defbcabb6b17, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.896951] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.897228] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a4d4e9c0-0165-4c11-ba98-1214e70b91a3/a4d4e9c0-0165-4c11-ba98-1214e70b91a3.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1239.897483] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62ab1418-010d-4bb7-9be9-05715e16e139 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.903615] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1239.903615] env[68279]: value = "task-2964035" [ 1239.903615] env[68279]: _type = "Task" [ 1239.903615] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.910973] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.968277] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updated VIF entry in instance network info cache for port 0f9fd296-d86e-4b84-9e0b-1037b955ee7f. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.968701] env[68279]: DEBUG nova.network.neutron [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updating instance_info_cache with network_info: [{"id": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "address": "fa:16:3e:01:46:6d", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9fd296-d8", "ovs_interfaceid": "0f9fd296-d86e-4b84-9e0b-1037b955ee7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.413176] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964035, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467484} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.413408] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] a4d4e9c0-0165-4c11-ba98-1214e70b91a3/a4d4e9c0-0165-4c11-ba98-1214e70b91a3.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1240.413586] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1240.413824] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee6681da-403d-41cf-81dd-0876b0eb76a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.420331] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1240.420331] env[68279]: value = "task-2964036" [ 1240.420331] env[68279]: _type = "Task" [ 1240.420331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.427771] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964036, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.471493] env[68279]: DEBUG oslo_concurrency.lockutils [req-7ca515ff-1f1b-4be3-8898-491d973b56e3 req-853eb853-1d17-4e29-a7c7-8792b1c99edd service nova] Releasing lock "refresh_cache-e246ae0f-1679-4757-acf2-ef5239f3c36d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.929809] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964036, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062422} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.930202] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1240.931057] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3eeb8a-5a9b-41e8-8058-10d7c4977423 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.953445] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] a4d4e9c0-0165-4c11-ba98-1214e70b91a3/a4d4e9c0-0165-4c11-ba98-1214e70b91a3.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.953709] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7104273f-21f9-44ab-af12-c72e1f456dfa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.973168] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1240.973168] env[68279]: value = "task-2964037" [ 1240.973168] env[68279]: _type = "Task" [ 1240.973168] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.980873] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964037, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.483062] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964037, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.756919] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.757208] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.757537] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "78b58db9-0616-428d-999c-2f6548008466-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.758038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.758159] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.760452] env[68279]: INFO nova.compute.manager [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Terminating instance [ 1241.984317] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.267345] env[68279]: DEBUG nova.compute.manager [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1242.267611] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1242.267866] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2123d911-d73a-4f5f-bfc7-20f6f8156357 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.275925] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1242.275925] env[68279]: value = "task-2964038" [ 1242.275925] env[68279]: _type = "Task" [ 1242.275925] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.285651] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964038, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.486176] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964037, 'name': ReconfigVM_Task, 'duration_secs': 1.084118} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.486399] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfigured VM instance instance-00000078 to attach disk [datastore1] a4d4e9c0-0165-4c11-ba98-1214e70b91a3/a4d4e9c0-0165-4c11-ba98-1214e70b91a3.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.486945] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34cf698a-2cc4-488e-b92f-b272f2a732c3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.494431] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1242.494431] env[68279]: value = "task-2964039" [ 1242.494431] env[68279]: _type = "Task" [ 1242.494431] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.501553] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964039, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.785818] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964038, 'name': PowerOffVM_Task, 'duration_secs': 0.205231} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.786086] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.786287] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1242.786483] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594759', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'name': 'volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '78b58db9-0616-428d-999c-2f6548008466', 'attached_at': '2025-03-12T08:53:37.000000', 'detached_at': '', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'serial': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1242.787273] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab39663-f8ee-4a06-8eff-128cb887cf93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.805513] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d2ab33-fff7-44b6-b9fa-3f794dc8e03d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.811906] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a81590-5152-4543-8a4e-30bdfeb60648 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.829036] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d38d7b3-687a-4d68-b9d0-cb5eed282a7c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.843128] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7/volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7.vmdk. No consolidation needed. {{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1242.848278] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1242.848543] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96cd9736-e9cb-42ab-b1b4-51ace0d290ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.866360] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1242.866360] env[68279]: value = "task-2964040" [ 1242.866360] env[68279]: _type = "Task" [ 1242.866360] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.873726] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.003748] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964039, 'name': Rename_Task, 'duration_secs': 0.153589} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.004144] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.004197] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0df2a8f3-6045-4e1c-bae7-31bae7e895cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.009710] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1243.009710] env[68279]: value = "task-2964041" [ 1243.009710] env[68279]: _type = "Task" [ 1243.009710] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.024892] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964041, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.378545] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964040, 'name': ReconfigVM_Task, 'duration_secs': 0.171466} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.378887] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1243.383396] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74ee1214-0910-4647-bb8f-d1025516d6bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.398786] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1243.398786] env[68279]: value = "task-2964042" [ 1243.398786] env[68279]: _type = "Task" [ 1243.398786] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.406629] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964042, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.518697] env[68279]: DEBUG oslo_vmware.api [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964041, 'name': PowerOnVM_Task, 'duration_secs': 0.446829} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.520079] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1243.520079] env[68279]: INFO nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Took 7.64 seconds to spawn the instance on the hypervisor. 
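The spawn sequence above submits one vCenter task after another (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and blocks in wait_for_task until each reports success, emitting the "progress is N%" records between polls. Below is a minimal, illustrative sketch of that polling pattern; the function name, the poll_task callable and the task-info dictionary shape are simplified assumptions, not the oslo.vmware implementation.

    import time

    def wait_for_task(poll_task, interval=0.5, timeout=300):
        """Poll a vSphere-style task until it reaches a terminal state.

        poll_task() is assumed to return a dict such as
        {'state': 'queued'|'running'|'success'|'error', 'progress': int}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            # Corresponds to the repeated "progress is N%" records logged
            # between polls in the lines above.
            time.sleep(interval)
        raise TimeoutError('task did not complete within %s seconds' % timeout)

The real loop lives in oslo_vmware/api.py (wait_for_task/_poll_task, the very frames cited in these records); the sketch keeps only the poll-until-terminal-state behaviour that the log makes visible.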
[ 1243.520079] env[68279]: DEBUG nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.520459] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0c7268-8a91-4371-b6fd-bde0195d56bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.908814] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964042, 'name': ReconfigVM_Task, 'duration_secs': 0.13601} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.909123] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594759', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'name': 'volume-6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '78b58db9-0616-428d-999c-2f6548008466', 'attached_at': '2025-03-12T08:53:37.000000', 'detached_at': '', 'volume_id': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7', 'serial': '6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1243.909411] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.910156] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8384ea1c-82c3-4b6f-934c-a6644fb3f977 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.916143] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1243.916365] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f05cdd7a-fb08-4ec7-865a-fe0335b46943 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.036724] env[68279]: INFO nova.compute.manager [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Took 12.38 seconds to build instance. 
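Nearly every step in this log is bracketed by "Acquiring lock" / "acquired" / "released" records from oslo_concurrency.lockutils, using per-resource lock names such as "refresh_cache-<instance uuid>", the datastore image-cache path, and the instance UUID itself. A hedged sketch of how such a critical section can be expressed with lockutils follows; refresh_instance_cache and refresh_fn are hypothetical placeholders, and only the lock-name pattern mirrors the log.

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        # Serialize cache refreshes per instance; the lock name follows the
        # "refresh_cache-<uuid>" pattern seen in the records above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)

lockutils also provides a synchronized() decorator for the same purpose, which is the usual source of the "inner ... lockutils.py" frames that appear in these acquire/release records.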
[ 1244.188846] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1244.189084] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1244.189182] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore1] 78b58db9-0616-428d-999c-2f6548008466 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.189437] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c6b97dd-f585-4f9a-b339-f0513ac83132 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.195262] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1244.195262] env[68279]: value = "task-2964044" [ 1244.195262] env[68279]: _type = "Task" [ 1244.195262] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.202903] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.539016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e499df1c-957c-4e57-b8e2-a70ade3256b2 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.891s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.704865] env[68279]: DEBUG oslo_vmware.api [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273466} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.705127] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.705327] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1244.705500] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1244.705667] env[68279]: INFO nova.compute.manager [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Took 2.44 seconds to destroy the instance on the hypervisor. [ 1244.705899] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1244.706098] env[68279]: DEBUG nova.compute.manager [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1244.706194] env[68279]: DEBUG nova.network.neutron [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1244.949713] env[68279]: DEBUG nova.compute.manager [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.949713] env[68279]: DEBUG nova.compute.manager [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1244.949962] env[68279]: DEBUG oslo_concurrency.lockutils [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.950070] env[68279]: DEBUG oslo_concurrency.lockutils [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.950234] env[68279]: DEBUG nova.network.neutron [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1245.678125] env[68279]: DEBUG nova.network.neutron [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1245.678503] env[68279]: DEBUG nova.network.neutron [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.028581] env[68279]: DEBUG nova.network.neutron [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.142659] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] 
Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.143729] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.181051] env[68279]: DEBUG oslo_concurrency.lockutils [req-6074babb-4fbd-4c23-9d03-0485750c132c req-5151e70c-9959-446c-82a8-b13323fbfcd5 service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.530782] env[68279]: INFO nova.compute.manager [-] [instance: 78b58db9-0616-428d-999c-2f6548008466] Took 1.82 seconds to deallocate network for instance. [ 1246.644927] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1247.073074] env[68279]: INFO nova.compute.manager [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1247.075460] env[68279]: DEBUG nova.compute.manager [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 78b58db9-0616-428d-999c-2f6548008466] Deleting volume: 6aea7cf4-fbf6-451e-b74f-4dbe2f0b94a7 {{(pid=68279) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1247.168313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.168592] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.170212] env[68279]: INFO nova.compute.claims [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1247.232150] env[68279]: DEBUG nova.compute.manager [req-921fb559-8eac-4df0-89b7-c307b95d7e9e req-51026694-2cbe-45d8-9c1f-036492621fe8 service nova] [instance: 78b58db9-0616-428d-999c-2f6548008466] Received event network-vif-deleted-5bdf1f33-fe52-4c6c-bc1b-7dded6a239fe {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1247.613332] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.260933] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7344badb-13f2-4ac6-8c8c-f8a85d627ec3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.268542] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d2cabd-eab7-4869-93d4-dcc09a9f198e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.297799] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ceb652-157a-421f-803c-5fdcf7c4f455 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.304450] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66ae22e-701e-4b71-85be-414d281dfa03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.318023] env[68279]: DEBUG nova.compute.provider_tree [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 
tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1248.839779] env[68279]: ERROR nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [req-d244c544-b64a-43b4-a8f6-c2bc04747c9a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d244c544-b64a-43b4-a8f6-c2bc04747c9a"}]} [ 1248.857599] env[68279]: DEBUG nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1248.869535] env[68279]: DEBUG nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1248.869742] env[68279]: DEBUG nova.compute.provider_tree [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1248.879167] 
env[68279]: DEBUG nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1248.895551] env[68279]: DEBUG nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1248.977005] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a2bed5-863d-46f0-b1a1-83610f167d67 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.984147] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4d07df-a80d-47ba-ba23-87b4cc4b1bf1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.013500] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9a2494-877c-4d41-b036-5146899ca55c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.020096] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70579bc2-c74b-4d91-b8ca-9962bee5da3c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.032853] env[68279]: DEBUG nova.compute.provider_tree [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1249.563775] env[68279]: DEBUG nova.scheduler.client.report [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 162 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1249.564070] env[68279]: DEBUG nova.compute.provider_tree [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 162 to 163 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1249.564233] env[68279]: DEBUG nova.compute.provider_tree [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1250.069301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.900s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.069823] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1250.072823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.460s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.073023] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.090602] env[68279]: INFO nova.scheduler.client.report [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocations for instance 78b58db9-0616-428d-999c-2f6548008466 [ 1250.577039] env[68279]: DEBUG nova.compute.utils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1250.578572] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1250.578765] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1250.597036] env[68279]: DEBUG oslo_concurrency.lockutils [None req-72da5a96-49df-4f3f-b596-9d098c868218 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "78b58db9-0616-428d-999c-2f6548008466" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.840s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.624622] env[68279]: DEBUG nova.policy [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1250.905935] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Successfully created port: a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1251.082338] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1251.641795] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.642237] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.642350] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.642511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.642705] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.645121] env[68279]: INFO nova.compute.manager [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Terminating instance [ 1252.091654] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1252.119203] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1252.119768] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1252.119768] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1252.119911] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1252.119951] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1252.120081] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1252.120292] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1252.120446] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1252.120609] 
env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1252.120768] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1252.120936] env[68279]: DEBUG nova.virt.hardware [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1252.121809] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3529727-fb65-4b95-b987-9be7aaccf093 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.129503] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3c5f61-b127-443a-9df7-1bd955be3acc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.148877] env[68279]: DEBUG nova.compute.manager [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1252.149087] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.149781] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f18368-0c85-4991-aeb2-3f7ee14468d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.156060] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1252.156268] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0049cdac-7453-4a1b-a0d0-cedc0c021396 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.162790] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1252.162790] env[68279]: value = "task-2964046" [ 1252.162790] env[68279]: _type = "Task" [ 1252.162790] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.170277] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964046, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.314947] env[68279]: DEBUG nova.compute.manager [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-vif-plugged-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1252.315752] env[68279]: DEBUG oslo_concurrency.lockutils [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.316067] env[68279]: DEBUG oslo_concurrency.lockutils [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.316301] env[68279]: DEBUG oslo_concurrency.lockutils [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.316536] env[68279]: DEBUG nova.compute.manager [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] No waiting events found dispatching network-vif-plugged-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1252.316776] env[68279]: WARNING nova.compute.manager [req-94e9eb28-9c17-47e4-86bc-3245e18e07b6 req-f5a8c7c5-5abe-47e3-8ee9-ed97103e892c service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received unexpected event network-vif-plugged-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 for instance with vm_state building and task_state spawning. [ 1252.374140] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Successfully updated port: a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.672309] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964046, 'name': PowerOffVM_Task, 'duration_secs': 0.233676} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.672309] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1252.672640] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.672741] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcb59a5c-f32a-4728-bb32-51d60878bd9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.758536] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.758765] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.758939] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore2] 5a7e2125-3310-4fcb-a281-59b0a2c07f67 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.759228] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b9236f9-1862-4bd0-9c73-bea66f96cfb1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.767588] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1252.767588] env[68279]: value = "task-2964048" [ 1252.767588] env[68279]: _type = "Task" [ 1252.767588] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.775076] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964048, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.876924] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.876924] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.877147] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.277682] env[68279]: DEBUG oslo_vmware.api [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1383} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.277911] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.278113] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.278292] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.278464] env[68279]: INFO nova.compute.manager [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1253.278706] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.278898] env[68279]: DEBUG nova.compute.manager [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.278993] env[68279]: DEBUG nova.network.neutron [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.409804] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1253.535409] env[68279]: DEBUG nova.network.neutron [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.011985] env[68279]: DEBUG nova.network.neutron [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.038192] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.038489] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Instance network_info: |[{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": 
"c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1254.038905] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:0d:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2a64d75-fb3d-4e1a-980b-dbf7a5251115', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1254.046353] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1254.046782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1254.047017] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66f70ada-a019-4f82-aa8c-eea41fb93da8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.066682] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1254.066682] env[68279]: value = "task-2964049" [ 1254.066682] env[68279]: _type = "Task" [ 1254.066682] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.073857] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964049, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.344923] env[68279]: DEBUG nova.compute.manager [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1254.345183] env[68279]: DEBUG nova.compute.manager [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing instance network info cache due to event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1254.345440] env[68279]: DEBUG oslo_concurrency.lockutils [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.345938] env[68279]: DEBUG oslo_concurrency.lockutils [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.345938] env[68279]: DEBUG nova.network.neutron [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1254.514853] env[68279]: INFO nova.compute.manager [-] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Took 1.24 seconds to deallocate network for instance. [ 1254.576406] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964049, 'name': CreateVM_Task, 'duration_secs': 0.28774} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.576544] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1254.577247] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.577440] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.577770] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1254.578040] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c08eb5-2ec6-4065-b62b-e9631ea02398 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.582757] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1254.582757] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52250202-47f0-adfa-ec6d-d7c15cc1fe8a" [ 1254.582757] env[68279]: _type = "Task" [ 1254.582757] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.590619] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52250202-47f0-adfa-ec6d-d7c15cc1fe8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.021488] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.021793] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.021965] env[68279]: DEBUG nova.objects.instance [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'resources' on Instance uuid 5a7e2125-3310-4fcb-a281-59b0a2c07f67 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.026324] env[68279]: DEBUG nova.network.neutron [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updated VIF entry in instance network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1255.026649] env[68279]: DEBUG nova.network.neutron [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.093579] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52250202-47f0-adfa-ec6d-d7c15cc1fe8a, 'name': SearchDatastore_Task, 
'duration_secs': 0.012477} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.093866] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.094105] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1255.094340] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.094505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.094691] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.094944] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6d9175b-1dfc-4b66-8171-f273c43be24d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.102775] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.102946] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1255.103612] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad4aeca-3254-4ff3-98b7-b3cebc4cbb0e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.107973] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1255.107973] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527b9d8f-e962-231e-0994-17ea3ee36772" [ 1255.107973] env[68279]: _type = "Task" [ 1255.107973] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.116405] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527b9d8f-e962-231e-0994-17ea3ee36772, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.528966] env[68279]: DEBUG oslo_concurrency.lockutils [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.529249] env[68279]: DEBUG nova.compute.manager [req-59677830-1d32-4114-8487-4a47a06c729b req-4727cde3-8c33-4b2e-9c57-5ca77f36e1f8 service nova] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Received event network-vif-deleted-ce309dfa-d75b-46b8-a812-c42760e72418 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.602474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b5936f-c14c-4369-ba85-d54d1d5f2b81 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.612878] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bd0294-e369-4943-85d4-c8adcdbd5ced {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.621122] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527b9d8f-e962-231e-0994-17ea3ee36772, 'name': SearchDatastore_Task, 'duration_secs': 0.007899} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.645489] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-593bcfe9-da93-4093-9227-ffeeaf90adce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.647993] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebb44fb-913f-4054-8cc9-4b3bade1f673 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.652880] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1255.652880] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52010d87-d5ac-51a0-3e36-79a7e8c67150" [ 1255.652880] env[68279]: _type = "Task" [ 1255.652880] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.658397] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7941db49-00c7-45c6-9561-31f620b3c573 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.666642] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52010d87-d5ac-51a0-3e36-79a7e8c67150, 'name': SearchDatastore_Task, 'duration_secs': 0.01004} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.674144] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.674402] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] ec90ec2e-f4a2-4b71-8588-d45a086d9453/ec90ec2e-f4a2-4b71-8588-d45a086d9453.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1255.674829] env[68279]: DEBUG nova.compute.provider_tree [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.676115] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dce55e1-121b-4d8a-8514-807cad6bf526 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.683508] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1255.683508] env[68279]: value = "task-2964050" [ 1255.683508] env[68279]: _type = "Task" [ 1255.683508] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.691252] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964050, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.179355] env[68279]: DEBUG nova.scheduler.client.report [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.193113] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433948} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.193905] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] ec90ec2e-f4a2-4b71-8588-d45a086d9453/ec90ec2e-f4a2-4b71-8588-d45a086d9453.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.194129] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1256.194369] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3eb30b81-5083-4ae8-a0cd-1e4bd3826533 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.201476] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1256.201476] env[68279]: value = "task-2964051" [ 1256.201476] env[68279]: _type = "Task" [ 1256.201476] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.208834] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964051, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.684259] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.701707] env[68279]: INFO nova.scheduler.client.report [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocations for instance 5a7e2125-3310-4fcb-a281-59b0a2c07f67 [ 1256.713289] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066498} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.713529] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.714277] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760bec21-3eff-4682-8149-a804ac2e8a8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.735784] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] ec90ec2e-f4a2-4b71-8588-d45a086d9453/ec90ec2e-f4a2-4b71-8588-d45a086d9453.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.736074] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9eca7014-343f-451e-9faa-578a30380886 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.756249] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1256.756249] env[68279]: value = "task-2964052" [ 1256.756249] env[68279]: _type = "Task" [ 1256.756249] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.764174] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964052, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.211723] env[68279]: DEBUG oslo_concurrency.lockutils [None req-df7e498d-ae49-4729-ad26-3bdd161dfd2c tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "5a7e2125-3310-4fcb-a281-59b0a2c07f67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.570s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.266518] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964052, 'name': ReconfigVM_Task, 'duration_secs': 0.277278} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.266821] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfigured VM instance instance-00000079 to attach disk [datastore2] ec90ec2e-f4a2-4b71-8588-d45a086d9453/ec90ec2e-f4a2-4b71-8588-d45a086d9453.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.267449] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbdc3e70-2e2b-41db-857b-3f666a6dc7fb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.275433] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1257.275433] env[68279]: value = "task-2964053" [ 1257.275433] env[68279]: _type = "Task" [ 1257.275433] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.282805] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964053, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.784814] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964053, 'name': Rename_Task, 'duration_secs': 0.133934} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.785078] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.785312] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-415edd6b-cb77-4f6b-b475-2bc5f5652b5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.791170] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1257.791170] env[68279]: value = "task-2964055" [ 1257.791170] env[68279]: _type = "Task" [ 1257.791170] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.799921] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.300697] env[68279]: DEBUG oslo_vmware.api [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964055, 'name': PowerOnVM_Task, 'duration_secs': 0.444952} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.301095] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.301095] env[68279]: INFO nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Took 6.21 seconds to spawn the instance on the hypervisor. 
[ 1258.301274] env[68279]: DEBUG nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.301997] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8762961c-cdfd-470b-9a6f-7dfd0290e3f5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.819391] env[68279]: INFO nova.compute.manager [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Took 11.67 seconds to build instance. [ 1259.244825] env[68279]: DEBUG nova.compute.manager [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.244990] env[68279]: DEBUG nova.compute.manager [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1259.245234] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.245379] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.245539] env[68279]: DEBUG nova.network.neutron [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1259.320880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b64d8350-4fa3-4caa-90a3-e0c1da66a119 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.178s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.095118] env[68279]: DEBUG nova.network.neutron [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1260.095520] env[68279]: DEBUG nova.network.neutron [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.598936] env[68279]: DEBUG oslo_concurrency.lockutils [req-3a287166-d6b6-44c8-9144-d553e40c3f00 req-ad033fe6-a15a-40bd-bef5-a679162e1ab2 service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.273826] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1261.274361] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing instance network info cache due to event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1261.274627] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.274809] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.274937] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1261.300681] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.300930] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.301423] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.301624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.301797] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.304700] env[68279]: INFO nova.compute.manager [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 
tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Terminating instance [ 1261.809402] env[68279]: DEBUG nova.compute.manager [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1261.809649] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1261.810606] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75f1fb6-b70f-415f-8009-53143b90a2a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.818786] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1261.819011] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c64ec9c7-ed1f-4698-9ffe-be6697f3e855 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.825186] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1261.825186] env[68279]: value = "task-2964056" [ 1261.825186] env[68279]: _type = "Task" [ 1261.825186] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.833091] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.005501] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updated VIF entry in instance network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1262.005937] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.335363] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964056, 'name': PowerOffVM_Task, 'duration_secs': 0.223694} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.335642] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.335821] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.336084] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d622395-79c5-4122-9b70-0ca45f432c93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.395815] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.396047] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.396230] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleting the datastore file [datastore1] 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.396481] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a30885d6-9189-48ae-85d1-be48adfeaed6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.403821] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for the task: (returnval){ [ 1262.403821] env[68279]: value = "task-2964058" [ 1262.403821] env[68279]: _type = "Task" [ 1262.403821] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.411099] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964058, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.508706] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.508946] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1262.509143] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing instance network info cache due to event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1262.509355] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.509496] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.509660] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1262.914726] env[68279]: DEBUG oslo_vmware.api [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Task: {'id': task-2964058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123492} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.915149] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1262.915402] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1262.915705] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1262.916015] env[68279]: INFO nova.compute.manager [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1262.916395] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.916655] env[68279]: DEBUG nova.compute.manager [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1262.916862] env[68279]: DEBUG nova.network.neutron [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.201603] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updated VIF entry in instance network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1263.201963] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.297269] env[68279]: DEBUG nova.compute.manager [req-658842c7-34bd-4060-8608-9606cea3d8d8 req-b92351ec-83fc-4c61-89ec-ebd66e7c63fd service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Received event network-vif-deleted-7dcc683d-e5ad-49a8-8e28-a1af77590026 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1263.297473] env[68279]: INFO nova.compute.manager [req-658842c7-34bd-4060-8608-9606cea3d8d8 req-b92351ec-83fc-4c61-89ec-ebd66e7c63fd service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Neutron deleted interface 7dcc683d-e5ad-49a8-8e28-a1af77590026; detaching it from the instance and deleting it from the info cache [ 1263.297641] env[68279]: DEBUG nova.network.neutron [req-658842c7-34bd-4060-8608-9606cea3d8d8 req-b92351ec-83fc-4c61-89ec-ebd66e7c63fd service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.663346] env[68279]: DEBUG nova.network.neutron [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.704211] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.704461] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1263.704728] env[68279]: DEBUG nova.compute.manager [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1263.704933] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.705093] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.705257] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.800055] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36b7ef0f-081e-4396-9a9b-7a0b8126eb0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.809985] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99421e9d-995f-4800-b896-3cf9a4b02aa0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.836804] env[68279]: DEBUG nova.compute.manager [req-658842c7-34bd-4060-8608-9606cea3d8d8 req-b92351ec-83fc-4c61-89ec-ebd66e7c63fd service nova] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Detach interface failed, port_id=7dcc683d-e5ad-49a8-8e28-a1af77590026, reason: Instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1264.166674] env[68279]: INFO nova.compute.manager [-] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Took 1.25 seconds to deallocate network for instance. [ 1264.421395] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1264.421805] env[68279]: DEBUG nova.network.neutron [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.674172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.674505] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.674784] env[68279]: DEBUG nova.objects.instance [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lazy-loading 'resources' on Instance uuid 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.924973] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea30b3b5-efe4-4967-9f5d-29a0a475c23c req-7b115bb7-7975-42a1-86bd-ad25a5e944fa service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.248649] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb445faf-d5c8-44d7-b188-1048bfb556f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.257317] env[68279]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1a16ed-1109-4dcb-b8fe-f8c1f296a940 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.287063] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e215bbf-7fe9-447a-ac34-51de19136368 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.293875] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cafd3b4-615d-42a6-84db-e81b875e27ce {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.307082] env[68279]: DEBUG nova.compute.provider_tree [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1265.838738] env[68279]: DEBUG nova.scheduler.client.report [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1265.839016] env[68279]: DEBUG nova.compute.provider_tree [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 163 to 164 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1265.839211] env[68279]: DEBUG nova.compute.provider_tree [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1266.168500] env[68279]: DEBUG oslo_service.periodic_task [None 
req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.343971] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.367045] env[68279]: INFO nova.scheduler.client.report [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Deleted allocations for instance 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1 [ 1266.875595] env[68279]: DEBUG oslo_concurrency.lockutils [None req-61e79f3a-7e15-4ee2-a83d-29afdcf3c550 tempest-ServerActionsTestOtherA-1729354294 tempest-ServerActionsTestOtherA-1729354294-project-member] Lock "298d3bc2-1fad-481f-993b-8d0dc9ed1ed1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.574s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.168015] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.164109] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.167730] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.670333] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.670598] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.670785] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.671008] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1268.672006] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be181ce2-faff-454d-b9c8-b616b223c76f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.681528] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e21abd1-727c-4536-bc27-71971c2e164a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.695673] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecc22b9-ec34-4123-8127-60de7087ec71 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.701623] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d008153-cdcc-4d98-8328-5d5e40698d9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.729866] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179885MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1268.730027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.730244] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.757935] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1269.757935] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e246ae0f-1679-4757-acf2-ef5239f3c36d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1269.758270] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance a4d4e9c0-0165-4c11-ba98-1214e70b91a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1269.758270] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance ec90ec2e-f4a2-4b71-8588-d45a086d9453 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1269.758270] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1269.758432] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1269.822195] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9fadce-0ef8-4a0a-91bb-98f807102470 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.829902] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cc0c83-e631-4d5c-b5c7-9af8e90df8d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.861432] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3eed94-2bc5-4095-9a57-5f61797c2ee5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.869510] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d29fe68-5fc8-4aeb-b052-ce2087f8957c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.882689] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.385617] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.892453] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1270.892453] 
env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.162s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.893311] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.893311] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1272.858337] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.858721] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.168832] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.361842] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1273.885589] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.886033] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.887591] env[68279]: INFO nova.compute.claims [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1274.659893] env[68279]: INFO nova.compute.manager [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Rebuilding instance [ 1274.696742] env[68279]: DEBUG nova.compute.manager [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.697673] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37abb0c-900f-4bcb-ab0b-6fa1d127bd94 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.961017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b360a0c3-2ae6-49d5-97ce-3832f6a8bf27 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.968717] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95af04b3-540b-4968-b0c7-7c382de00c3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.000197] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c843d7b-776f-4cd7-ad3d-9b1d45a6b0d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.007203] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8480a8fb-5aef-4ed3-8c09-510dcf66b3e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.020113] env[68279]: DEBUG nova.compute.provider_tree [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1275.164194] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.523103] env[68279]: DEBUG nova.scheduler.client.report [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1275.669584] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.669876] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.710533] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.710859] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9691364-c8a7-48be-8d0b-a6175a14ff56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.719897] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1275.719897] env[68279]: value = "task-2964059" [ 1275.719897] env[68279]: _type = "Task" [ 1275.719897] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.728405] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964059, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.028016] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.142s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.028277] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1276.229034] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964059, 'name': PowerOffVM_Task, 'duration_secs': 0.162907} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.229351] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.229723] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.230852] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f235dc50-5a6a-46a5-a45b-57ea363fba28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.237599] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1276.237824] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43930b26-9426-4c89-873c-6cc080448052 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.299087] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1276.299318] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Deleting contents of the VM from datastore 
datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1276.299505] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleting the datastore file [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.299762] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c46c487-7f9b-4d90-9bcc-359b6707b048 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.305979] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1276.305979] env[68279]: value = "task-2964061" [ 1276.305979] env[68279]: _type = "Task" [ 1276.305979] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.313254] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.533078] env[68279]: DEBUG nova.compute.utils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1276.534545] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1276.535332] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1276.577647] env[68279]: DEBUG nova.policy [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0f0f631a27f4d93bcc70956d721d9ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1ad13d5de94b14ab00b7f003c1851d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1276.817211] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145682} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.817584] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.817784] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1276.817952] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1276.833946] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Successfully created port: a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1277.038246] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1277.852194] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1277.852481] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.852608] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1277.852809] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.852957] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1277.853127] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1277.853334] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1277.853494] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1277.853660] env[68279]: DEBUG nova.virt.hardware [None 
req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1277.853823] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1277.854015] env[68279]: DEBUG nova.virt.hardware [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1277.854921] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99f5eb2-92df-4778-b993-e26bcec412b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.863229] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfbe6ec-6f78-4a29-9e59-7f514786bd2f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.877104] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:46:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f9fd296-d86e-4b84-9e0b-1037b955ee7f', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.884406] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1277.884625] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1277.884848] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb0469f1-8a54-4d6e-89cd-f88362caf457 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.903685] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.903685] env[68279]: value = "task-2964062" [ 1277.903685] env[68279]: _type = "Task" [ 1277.903685] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.910711] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964062, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.048472] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1278.069763] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1278.070052] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.070258] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1278.070457] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.070608] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1278.070760] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1278.071065] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1278.071155] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1278.071332] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1278.071504] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1278.071677] env[68279]: DEBUG nova.virt.hardware [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1278.072533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30916bb3-a85f-4f7f-a88a-6ea419249b28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.080239] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852d9153-5e97-430b-9193-b5e3575b3255 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.184239] env[68279]: DEBUG nova.compute.manager [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Received event network-vif-plugged-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1278.184607] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.184738] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] Lock "04f42241-5598-47e2-906c-998a19da434f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.184985] env[68279]: DEBUG oslo_concurrency.lockutils [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] Lock "04f42241-5598-47e2-906c-998a19da434f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.185214] env[68279]: DEBUG 
nova.compute.manager [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] No waiting events found dispatching network-vif-plugged-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1278.185435] env[68279]: WARNING nova.compute.manager [req-7a2b1413-d0e2-478a-a164-8ad87a76c980 req-5df1b031-7a04-4ae3-a8c5-0f5560121235 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Received unexpected event network-vif-plugged-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 for instance with vm_state building and task_state spawning. [ 1278.266477] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Successfully updated port: a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1278.413595] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964062, 'name': CreateVM_Task, 'duration_secs': 0.33218} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.413782] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1278.414424] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.414593] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.414936] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1278.415208] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c282c84-26e8-4637-aed1-0842174d3c1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.419391] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1278.419391] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529aa8fa-9101-7d01-3beb-a64d30624a89" [ 1278.419391] env[68279]: _type = "Task" [ 1278.419391] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.426693] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529aa8fa-9101-7d01-3beb-a64d30624a89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.773150] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.773150] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.773150] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1278.930451] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529aa8fa-9101-7d01-3beb-a64d30624a89, 'name': SearchDatastore_Task, 'duration_secs': 0.009499} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.930852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.930962] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.931219] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.931366] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.931540] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1278.931793] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15323b29-b2d9-4347-bb1f-035738fa4bf7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.939739] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1278.939859] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1278.940540] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c431362-1c44-408e-a26d-90efd77e9695 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.945548] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1278.945548] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c2b25-7cdb-6ccb-4ae6-9503754d811d" [ 1278.945548] env[68279]: _type = "Task" [ 1278.945548] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.952932] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c2b25-7cdb-6ccb-4ae6-9503754d811d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.303930] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1279.419729] env[68279]: DEBUG nova.network.neutron [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.456924] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': 
session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527c2b25-7cdb-6ccb-4ae6-9503754d811d, 'name': SearchDatastore_Task, 'duration_secs': 0.008738} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.457696] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6a23bf7-9b3d-40c2-b364-197ed3128289 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.462433] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1279.462433] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52064351-5873-08bb-7d21-3e29d5519c14" [ 1279.462433] env[68279]: _type = "Task" [ 1279.462433] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.469577] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52064351-5873-08bb-7d21-3e29d5519c14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.923066] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.923268] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Instance network_info: |[{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1279.923670] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:72:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2750b33-4e6b-438f-8a02-3c67fc83b7d7', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.932596] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating folder: Project (7c1ad13d5de94b14ab00b7f003c1851d). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.932958] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eacc48a4-1078-4764-a453-fc09d975b3e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.943784] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created folder: Project (7c1ad13d5de94b14ab00b7f003c1851d) in parent group-v594445. [ 1279.943966] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating folder: Instances. Parent ref: group-v594769. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.944216] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0b8c59f-39b2-46f9-bcce-136211aca238 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.952637] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created folder: Instances in parent group-v594769. [ 1279.952890] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1279.953091] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1279.953285] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11cb6103-887a-4074-90c1-1e34766809e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.975275] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52064351-5873-08bb-7d21-3e29d5519c14, 'name': SearchDatastore_Task, 'duration_secs': 0.009424} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.976360] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.976614] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1279.976826] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1279.976826] env[68279]: value = "task-2964065" [ 1279.976826] env[68279]: _type = "Task" [ 1279.976826] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.977021] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9e022f9-43fa-46e9-bf95-81192dde9613 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.986077] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964065, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.987171] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1279.987171] env[68279]: value = "task-2964066" [ 1279.987171] env[68279]: _type = "Task" [ 1279.987171] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.993760] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.143995] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.144357] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.144892] env[68279]: DEBUG nova.objects.instance [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid a4d4e9c0-0165-4c11-ba98-1214e70b91a3 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.218029] env[68279]: DEBUG nova.compute.manager [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Received event network-changed-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1280.218207] env[68279]: DEBUG nova.compute.manager [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Refreshing instance network info cache due to event network-changed-a2750b33-4e6b-438f-8a02-3c67fc83b7d7. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1280.218478] env[68279]: DEBUG oslo_concurrency.lockutils [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.218586] env[68279]: DEBUG oslo_concurrency.lockutils [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.218716] env[68279]: DEBUG nova.network.neutron [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Refreshing network info cache for port a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.488793] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964065, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.496520] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457469} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.496772] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1280.496981] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1280.497318] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcb9c73a-1c93-4890-af21-0b30be20ace6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.503079] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1280.503079] env[68279]: value = "task-2964067" [ 1280.503079] env[68279]: _type = "Task" [ 1280.503079] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.511356] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.729383] env[68279]: DEBUG nova.objects.instance [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid a4d4e9c0-0165-4c11-ba98-1214e70b91a3 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.966224] env[68279]: DEBUG nova.network.neutron [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updated VIF entry in instance network info cache for port a2750b33-4e6b-438f-8a02-3c67fc83b7d7. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1280.966601] env[68279]: DEBUG nova.network.neutron [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.988367] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964065, 'name': CreateVM_Task, 'duration_secs': 0.545107} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.988547] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1280.989264] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.989496] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.989839] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1280.990129] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97488caf-5ca2-4ff8-bf3c-5c2ee5bb37a9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.994459] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1280.994459] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d107fc-a7bf-9247-1bf8-8ebaa20d107d" [ 1280.994459] env[68279]: _type = "Task" [ 1280.994459] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.002072] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d107fc-a7bf-9247-1bf8-8ebaa20d107d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.011114] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077634} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.011351] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.012081] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dd266c-3974-4976-b3e5-095da9d1ca28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.033094] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.033310] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba0bcc9e-c3f8-4e00-b352-b744fc679f38 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.051021] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1281.051021] env[68279]: value = "task-2964068" [ 1281.051021] env[68279]: _type = "Task" [ 1281.051021] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.058269] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.233743] env[68279]: DEBUG nova.objects.base [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1281.233970] env[68279]: DEBUG nova.network.neutron [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1281.294773] env[68279]: DEBUG nova.policy [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1281.469783] env[68279]: DEBUG oslo_concurrency.lockutils [req-b1ebc04d-0814-4439-bd27-355b87a0b776 req-20934e3f-f21a-42a6-b205-9edde48d1995 service nova] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.505263] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d107fc-a7bf-9247-1bf8-8ebaa20d107d, 'name': SearchDatastore_Task, 'duration_secs': 0.009515} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.505616] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.505841] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.506075] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.506225] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.506401] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.506654] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f5b30b5-11b3-452d-b914-abaf68159cd3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.515810] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.515880] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1281.516546] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a430de-d680-40cf-abe8-71fcd267c929 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.521342] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1281.521342] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a283ee-e70b-2c97-70d1-51cfc9d18b2a" [ 1281.521342] env[68279]: _type = "Task" [ 1281.521342] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.528919] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a283ee-e70b-2c97-70d1-51cfc9d18b2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.559267] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964068, 'name': ReconfigVM_Task, 'duration_secs': 0.261706} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.559504] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d/e246ae0f-1679-4757-acf2-ef5239f3c36d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1281.560117] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e11ea02-1afb-4d48-81b5-f37f600668a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.565839] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1281.565839] env[68279]: value = "task-2964069" [ 1281.565839] env[68279]: _type = "Task" [ 1281.565839] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.573049] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964069, 'name': Rename_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.031596] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52a283ee-e70b-2c97-70d1-51cfc9d18b2a, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.032392] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-053ffed8-6be4-413d-9008-f58b8eb7ba80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.037371] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1282.037371] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c691c-8753-ad00-cc59-33f3c0d808b4" [ 1282.037371] env[68279]: _type = "Task" [ 1282.037371] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.044544] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c691c-8753-ad00-cc59-33f3c0d808b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.074067] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964069, 'name': Rename_Task, 'duration_secs': 0.126958} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.074952] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1282.074952] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eed3326d-74c7-43b6-a7aa-e180158ddf57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.080250] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1282.080250] env[68279]: value = "task-2964070" [ 1282.080250] env[68279]: _type = "Task" [ 1282.080250] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.087192] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.548929] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521c691c-8753-ad00-cc59-33f3c0d808b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009652} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.549186] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.549440] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1282.549692] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57dcbe1a-d31b-4063-8ee0-ec678f7b5394 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.555331] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1282.555331] env[68279]: value = "task-2964071" [ 1282.555331] env[68279]: _type = "Task" [ 1282.555331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.562841] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.588828] env[68279]: DEBUG oslo_vmware.api [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964070, 'name': PowerOnVM_Task, 'duration_secs': 0.427517} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.589104] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1282.589343] env[68279]: DEBUG nova.compute.manager [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1282.590111] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb47be4-e6d2-4a48-bb95-157948b36072 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.634103] env[68279]: DEBUG nova.compute.manager [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1282.634103] env[68279]: DEBUG oslo_concurrency.lockutils [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.634103] env[68279]: DEBUG oslo_concurrency.lockutils [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.634103] env[68279]: DEBUG oslo_concurrency.lockutils [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.634103] env[68279]: DEBUG nova.compute.manager [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] No waiting events found dispatching network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1282.634383] env[68279]: WARNING nova.compute.manager [req-ed0620ef-56ae-4ef8-8639-d87f7fd65204 req-5cf82bbc-147b-4dfd-83b5-810ef42155d3 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received unexpected event network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 for instance with vm_state active and task_state None. 
[ 1282.748519] env[68279]: DEBUG nova.network.neutron [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Successfully updated port: edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1283.065519] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456451} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.065804] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1283.065994] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1283.066255] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92bacde9-519e-4c99-8ab9-2dea7932c633 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.072440] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1283.072440] env[68279]: value = "task-2964072" [ 1283.072440] env[68279]: _type = "Task" [ 1283.072440] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.080192] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964072, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.107908] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.108144] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.108434] env[68279]: DEBUG nova.objects.instance [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68279) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1283.251214] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.251391] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.252023] env[68279]: DEBUG nova.network.neutron [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1283.582425] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066848} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.582698] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1283.583411] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f758f-2619-4132-8139-f19c6585559b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.604803] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1283.605039] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a11148f-5743-405d-b55e-f80c79f6135a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.626423] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1283.626423] env[68279]: value = "task-2964073" [ 1283.626423] env[68279]: _type = "Task" [ 1283.626423] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.634942] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964073, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.785742] env[68279]: WARNING nova.network.neutron [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. 
ignoring it [ 1284.039280] env[68279]: DEBUG nova.network.neutron [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "address": "fa:16:3e:b2:f0:ce", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf36923-fd", "ovs_interfaceid": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.124019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-1b4a7c44-7ab8-4a3b-8c11-c0231e914d0d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.136834] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964073, 'name': ReconfigVM_Task, 'duration_secs': 0.251838} 
completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.137117] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1284.137732] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-574d60dd-8c06-4ba0-b45e-4dd52f3e7233 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.144013] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1284.144013] env[68279]: value = "task-2964074" [ 1284.144013] env[68279]: _type = "Task" [ 1284.144013] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.156046] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964074, 'name': Rename_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.542342] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.543027] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.543191] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.544063] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57ace89-0d87-429d-b5a8-5f2026844174 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.561305] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1284.561527] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.561685] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1284.561861] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.562011] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1284.562176] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1284.562463] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1284.562633] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1284.562802] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1284.562964] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1284.563154] env[68279]: DEBUG nova.virt.hardware [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1284.569462] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfiguring VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1284.569737] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3a8bfc7-305a-44f9-a2a6-9b7fae295858 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.586498] env[68279]: DEBUG oslo_vmware.api [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1284.586498] env[68279]: value = "task-2964075" [ 1284.586498] env[68279]: _type = "Task" [ 1284.586498] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.594566] env[68279]: DEBUG oslo_vmware.api [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964075, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.653523] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964074, 'name': Rename_Task, 'duration_secs': 0.15049} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.653795] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1284.654133] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77e3ec31-6e2f-43b2-b6d5-d3b7ddf44d22 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.658907] env[68279]: DEBUG nova.compute.manager [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.659118] env[68279]: DEBUG nova.compute.manager [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-edf36923-fdfe-4c55-b0ca-33b41b182d18. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1284.659377] env[68279]: DEBUG oslo_concurrency.lockutils [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.659544] env[68279]: DEBUG oslo_concurrency.lockutils [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.659679] env[68279]: DEBUG nova.network.neutron [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.664016] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1284.664016] env[68279]: value = "task-2964076" [ 1284.664016] env[68279]: _type = "Task" [ 1284.664016] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.670218] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964076, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.099608] env[68279]: DEBUG oslo_vmware.api [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.174327] env[68279]: DEBUG oslo_vmware.api [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964076, 'name': PowerOnVM_Task, 'duration_secs': 0.433378} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.174906] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1285.174906] env[68279]: INFO nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 7.13 seconds to spawn the instance on the hypervisor. [ 1285.175091] env[68279]: DEBUG nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1285.175745] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea07b21-99c1-4b80-90ba-941730d0ad31 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.402116] env[68279]: DEBUG nova.network.neutron [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port edf36923-fdfe-4c55-b0ca-33b41b182d18. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1285.402601] env[68279]: DEBUG nova.network.neutron [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "address": "fa:16:3e:b2:f0:ce", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf36923-fd", "ovs_interfaceid": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.598191] env[68279]: DEBUG oslo_vmware.api [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964075, 'name': ReconfigVM_Task, 'duration_secs': 0.628835} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.598191] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.598191] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfigured VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1285.694462] env[68279]: INFO nova.compute.manager [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 11.83 seconds to build instance. [ 1285.880165] env[68279]: DEBUG nova.compute.manager [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Received event network-changed-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1285.880349] env[68279]: DEBUG nova.compute.manager [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Refreshing instance network info cache due to event network-changed-a2750b33-4e6b-438f-8a02-3c67fc83b7d7. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1285.880563] env[68279]: DEBUG oslo_concurrency.lockutils [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.880707] env[68279]: DEBUG oslo_concurrency.lockutils [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.880864] env[68279]: DEBUG nova.network.neutron [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Refreshing network info cache for port a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1285.906651] env[68279]: DEBUG oslo_concurrency.lockutils [req-0255e280-ce68-4c34-ab41-10c81552785c req-790d0e43-355a-4973-b5ba-f2d2b859fdc2 service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.103092] env[68279]: DEBUG oslo_concurrency.lockutils [None req-cd05c284-0054-4340-a000-8633c5c59781 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.958s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.196571] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fdc05576-05a0-44c4-9747-1ecd92a3608b tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.338s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.583357] env[68279]: DEBUG nova.network.neutron [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updated VIF entry in instance network info cache for port a2750b33-4e6b-438f-8a02-3c67fc83b7d7. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1286.583576] env[68279]: DEBUG nova.network.neutron [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.086815] env[68279]: DEBUG oslo_concurrency.lockutils [req-349c2d95-342d-449c-8b88-df3183fc5f40 req-61962435-1eb6-4bf7-95e4-b23a8ccb6a93 service nova] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.338741] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.339059] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.842228] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.842445] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.843371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7298c92-e9f6-479d-923c-2eaa97f11777 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.861215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a4660b-3104-43d0-a7b3-00f9f95cb637 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.888227] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfiguring VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1287.888496] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1a41d5c-3fd5-49f2-a260-d383efb427ff {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.906863] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1287.906863] env[68279]: value = "task-2964077" [ 1287.906863] env[68279]: _type = "Task" [ 1287.906863] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.914484] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.416731] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.916828] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.417370] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.918685] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.418119] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.919947] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.419802] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.920367] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.421727] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.922629] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.422710] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.923215] env[68279]: DEBUG oslo_vmware.api [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964077, 'name': ReconfigVM_Task, 'duration_secs': 5.763974} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.923468] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.923660] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Reconfigured VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1295.185692] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.185970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1295.186032] env[68279]: DEBUG nova.network.neutron [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1295.953195] env[68279]: INFO nova.network.neutron [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Port edf36923-fdfe-4c55-b0ca-33b41b182d18 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1295.953569] env[68279]: DEBUG nova.network.neutron [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.004401] env[68279]: DEBUG nova.compute.manager [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.004714] env[68279]: DEBUG nova.compute.manager [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing instance network info cache due to event network-changed-e9c7f070-ce09-4503-a0e5-76cfe063bc35. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1296.005081] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] Acquiring lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.455665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.457835] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] Acquired lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.458053] env[68279]: DEBUG nova.network.neutron [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Refreshing network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.684456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.684757] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.685159] env[68279]: DEBUG nova.objects.instance [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'flavor' on Instance uuid ec90ec2e-f4a2-4b71-8588-d45a086d9453 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.963363] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e0dc5e98-7dae-421b-82f3-55317ed8f56a tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-a4d4e9c0-0165-4c11-ba98-1214e70b91a3-edf36923-fdfe-4c55-b0ca-33b41b182d18" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.624s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.142597] env[68279]: DEBUG nova.network.neutron [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] [instance: 
a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updated VIF entry in instance network info cache for port e9c7f070-ce09-4503-a0e5-76cfe063bc35. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.142955] env[68279]: DEBUG nova.network.neutron [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [{"id": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "address": "fa:16:3e:1d:f2:df", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c7f070-ce", "ovs_interfaceid": "e9c7f070-ce09-4503-a0e5-76cfe063bc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.273107] env[68279]: DEBUG nova.objects.instance [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'pci_requests' on Instance uuid ec90ec2e-f4a2-4b71-8588-d45a086d9453 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1297.645455] env[68279]: DEBUG oslo_concurrency.lockutils [req-dfa65863-e961-42a7-8029-ad70ddd00782 req-bca56578-bfd4-4b8e-8080-5ba24f340986 service nova] Releasing lock "refresh_cache-a4d4e9c0-0165-4c11-ba98-1214e70b91a3" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.775602] env[68279]: DEBUG nova.objects.base [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1297.775842] env[68279]: DEBUG nova.network.neutron [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1297.834875] env[68279]: DEBUG nova.policy [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b398955e72f64cf09f78351e69dba445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'c765e6d99a8f47d6b932e30e05e54405', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1298.035249] env[68279]: DEBUG nova.compute.manager [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.035449] env[68279]: DEBUG nova.compute.manager [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing instance network info cache due to event network-changed-a2a64d75-fb3d-4e1a-980b-dbf7a5251115. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1298.035686] env[68279]: DEBUG oslo_concurrency.lockutils [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.035831] env[68279]: DEBUG oslo_concurrency.lockutils [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.036013] env[68279]: DEBUG nova.network.neutron [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.726760] env[68279]: DEBUG nova.network.neutron [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updated VIF entry in instance network info cache for port a2a64d75-fb3d-4e1a-980b-dbf7a5251115. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.727126] env[68279]: DEBUG nova.network.neutron [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.230328] env[68279]: DEBUG oslo_concurrency.lockutils [req-e85ed4f6-67b1-4351-97ae-c2fa1ec7eec8 req-8e80c777-cb8c-4de8-b6cb-5c209b8dad39 service nova] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.257475] env[68279]: DEBUG nova.network.neutron [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Successfully updated port: edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1299.761323] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.761323] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.761323] env[68279]: DEBUG nova.network.neutron [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Building network info cache for instance {{(pid=68279) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1300.060723] env[68279]: DEBUG nova.compute.manager [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.060941] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.061157] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.061327] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.061495] env[68279]: DEBUG nova.compute.manager [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] No waiting events found dispatching network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1300.061664] env[68279]: WARNING nova.compute.manager [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received unexpected event network-vif-plugged-edf36923-fdfe-4c55-b0ca-33b41b182d18 for instance with vm_state active and task_state None. [ 1300.061817] env[68279]: DEBUG nova.compute.manager [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-changed-edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.061967] env[68279]: DEBUG nova.compute.manager [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing instance network info cache due to event network-changed-edf36923-fdfe-4c55-b0ca-33b41b182d18. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1300.062141] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.300248] env[68279]: WARNING nova.network.neutron [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] c0150443-bd24-4fb2-8b27-b118d22250c3 already exists in list: networks containing: ['c0150443-bd24-4fb2-8b27-b118d22250c3']. ignoring it [ 1300.544728] env[68279]: DEBUG nova.network.neutron [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "address": "fa:16:3e:b2:f0:ce", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf36923-fd", "ovs_interfaceid": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] 
{{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.047933] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.048637] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.048802] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.049109] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.049320] env[68279]: DEBUG nova.network.neutron [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Refreshing network info cache for port edf36923-fdfe-4c55-b0ca-33b41b182d18 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1301.050989] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d352a35e-85e1-4df2-aa2b-0f6eca4b7a6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.067905] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1301.068146] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.068309] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1301.068494] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.068639] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1301.068784] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1301.068984] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1301.069158] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1301.069327] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1301.069517] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1301.069874] env[68279]: DEBUG nova.virt.hardware [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1301.075871] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfiguring VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1301.076792] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b936c132-76a7-4e45-95c9-e4e55b77ef8c {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.093946] env[68279]: DEBUG oslo_vmware.api [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1301.093946] env[68279]: value = "task-2964078" [ 1301.093946] env[68279]: _type = "Task" [ 1301.093946] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.101519] env[68279]: DEBUG oslo_vmware.api [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964078, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.604070] env[68279]: DEBUG oslo_vmware.api [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964078, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.738939] env[68279]: DEBUG nova.network.neutron [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updated VIF entry in instance network info cache for port edf36923-fdfe-4c55-b0ca-33b41b182d18. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.739343] env[68279]: DEBUG nova.network.neutron [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "address": "fa:16:3e:b2:f0:ce", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf36923-fd", "ovs_interfaceid": "edf36923-fdfe-4c55-b0ca-33b41b182d18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.104466] env[68279]: DEBUG oslo_vmware.api [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964078, 'name': ReconfigVM_Task, 'duration_secs': 0.515448} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.104970] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.105220] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfigured VM to attach interface {{(pid=68279) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1302.242083] env[68279]: DEBUG oslo_concurrency.lockutils [req-135850dc-ba70-461a-a492-ae773fb7104f req-e47121a8-3c90-4c1c-9ef1-f79587df6bf7 service nova] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.610291] env[68279]: DEBUG oslo_concurrency.lockutils [None req-fa3bd908-3169-4236-83ed-c566f6f7d343 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.925s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.863962] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.864290] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 
tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.367643] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.367973] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.368748] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d503a35a-868d-4d6b-b649-af427780d154 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.386826] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d82640-31b6-4fa4-aa4b-d2332f90dfcd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.412365] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfiguring VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1304.412597] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34d0678c-1e8c-4169-a96a-cb741a185387 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.432162] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1304.432162] env[68279]: value = "task-2964079" [ 1304.432162] env[68279]: _type = "Task" [ 1304.432162] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.439631] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.941497] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.443299] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.945046] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.444108] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.944505] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.445753] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.947182] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.448058] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.948922] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.449546] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.950259] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.451417] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.952537] env[68279]: DEBUG oslo_vmware.api [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964079, 'name': ReconfigVM_Task, 'duration_secs': 6.059286} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.952835] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.952986] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Reconfigured VM to detach interface {{(pid=68279) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1312.206113] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.206504] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquired lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.206504] env[68279]: DEBUG nova.network.neutron [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.236704] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.236953] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.237161] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.237346] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.237511] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.240819] env[68279]: INFO nova.compute.manager [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Terminating instance [ 1312.745479] env[68279]: DEBUG nova.compute.manager [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1312.745699] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1312.746621] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687e32eb-b414-4078-a62d-824bbe3a84ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.754382] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.754607] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cedaae34-9bb3-4c26-a5e9-31271488da09 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.761029] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1312.761029] env[68279]: value = "task-2964080" [ 1312.761029] env[68279]: _type = "Task" [ 1312.761029] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.771959] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964080, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.964520] env[68279]: INFO nova.network.neutron [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Port edf36923-fdfe-4c55-b0ca-33b41b182d18 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
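The repeated "Task: {'id': task-2964079, 'name': ReconfigVM_Task} progress is N%" and PowerOffVM_Task records above come from polling the vCenter task until it reaches a terminal state. A minimal sketch of that pattern follows; it is not the oslo.vmware implementation, and the helper name get_task_info, the TaskInfo attribute names and the 0.5 s interval are illustrative assumptions.

    import time

    POLL_INTERVAL = 0.5  # roughly the cadence of the _poll_task records above


    def wait_for_task(session, task_ref, get_task_info, poll_interval=POLL_INTERVAL):
        """Poll a vCenter task until it finishes or fails.

        `get_task_info` is assumed to return an object with `state`,
        `progress` and `error` attributes, mirroring the VIM TaskInfo shape.
        """
        while True:
            info = get_task_info(session, task_ref)
            if info.state == "success":
                return info                     # e.g. ReconfigVM_Task completed successfully
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            # queued/running: report progress and poll again, like the DEBUG lines above
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(poll_interval)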
[ 1312.964920] env[68279]: DEBUG nova.network.neutron [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [{"id": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "address": "fa:16:3e:49:0d:25", "network": {"id": "c0150443-bd24-4fb2-8b27-b118d22250c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1688489502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c765e6d99a8f47d6b932e30e05e54405", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2a64d75-fb", "ovs_interfaceid": "a2a64d75-fb3d-4e1a-980b-dbf7a5251115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.271543] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964080, 'name': PowerOffVM_Task, 'duration_secs': 0.197563} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.271868] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1313.272070] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1313.272320] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fd2c467-73fc-443f-bf26-c95ac45466d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.330938] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1313.331260] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1313.331459] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleting the datastore file [datastore2] ec90ec2e-f4a2-4b71-8588-d45a086d9453 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1313.331727] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-682ef6a4-ea6b-44f7-975d-cab87229c0a5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.339460] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1313.339460] env[68279]: value = "task-2964082" [ 1313.339460] env[68279]: _type = "Task" [ 1313.339460] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.346980] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.467828] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Releasing lock "refresh_cache-ec90ec2e-f4a2-4b71-8588-d45a086d9453" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.849312] env[68279]: DEBUG oslo_vmware.api [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150311} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.849550] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1313.849723] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1313.849901] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1313.850086] env[68279]: INFO nova.compute.manager [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1313.850333] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.850527] env[68279]: DEBUG nova.compute.manager [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1313.850622] env[68279]: DEBUG nova.network.neutron [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1313.972103] env[68279]: DEBUG oslo_concurrency.lockutils [None req-68b47c33-5427-4de4-aa86-e9f5af631af7 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "interface-ec90ec2e-f4a2-4b71-8588-d45a086d9453-edf36923-fdfe-4c55-b0ca-33b41b182d18" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.108s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.490016] env[68279]: DEBUG nova.compute.manager [req-e1054776-2cb1-4e39-83c9-19b889a92ed0 req-eb32400e-b0d5-42cb-882c-a32b17083a96 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Received event network-vif-deleted-a2a64d75-fb3d-4e1a-980b-dbf7a5251115 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1314.490253] env[68279]: INFO nova.compute.manager [req-e1054776-2cb1-4e39-83c9-19b889a92ed0 req-eb32400e-b0d5-42cb-882c-a32b17083a96 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Neutron deleted interface a2a64d75-fb3d-4e1a-980b-dbf7a5251115; detaching it from the instance and deleting it from the info cache [ 1314.490405] env[68279]: DEBUG nova.network.neutron [req-e1054776-2cb1-4e39-83c9-19b889a92ed0 req-eb32400e-b0d5-42cb-882c-a32b17083a96 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.975134] env[68279]: DEBUG nova.network.neutron [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.992953] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f488af5-21ff-4628-b1a2-1eda3dd3d9f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.002745] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2571a3a4-2fa9-432c-8a9b-d4ba446cda74 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.029699] env[68279]: DEBUG nova.compute.manager [req-e1054776-2cb1-4e39-83c9-19b889a92ed0 req-eb32400e-b0d5-42cb-882c-a32b17083a96 service nova] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Detach interface failed, port_id=a2a64d75-fb3d-4e1a-980b-dbf7a5251115, reason: Instance ec90ec2e-f4a2-4b71-8588-d45a086d9453 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1315.478396] env[68279]: INFO nova.compute.manager [-] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Took 1.63 seconds to deallocate network for instance. 
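The "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held 10.108s" records in this section are emitted by oslo.concurrency's lock helpers. A minimal usage sketch, with an illustrative lock name and guarded function rather than Nova's actual code:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "ec90ec2e-f4a2-4b71-8588-d45a086d9453"  # instance UUID from the log


    @lockutils.synchronized(f"interface-{INSTANCE_UUID}")
    def do_detach_interface():
        # Only one caller at a time runs the interface detach for this instance;
        # time spent blocked here and inside the body is what shows up as the
        # "waited N.NNNs" / "held N.NNNs" figures in the DEBUG records.
        ...


    # The same helper also works as a context manager around a critical section,
    # like the per-instance lock taken around the ReconfigVM_Task call above:
    with lockutils.lock(INSTANCE_UUID):
        do_detach_interface()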
[ 1315.985447] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.985796] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.985948] env[68279]: DEBUG nova.objects.instance [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'resources' on Instance uuid ec90ec2e-f4a2-4b71-8588-d45a086d9453 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.559367] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a32dd0-583d-45b1-bd1c-538450077aa1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.566830] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a7aa1f-2296-4c4b-b5ac-f14f9992056b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.595868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126262cc-619c-40c2-a6a7-ec10cc777cb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.602548] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec730979-0655-4f8d-b803-9bc4bfa9902a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.614963] env[68279]: DEBUG nova.compute.provider_tree [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.136832] env[68279]: ERROR nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [req-489e0b6c-5347-4863-a244-e9f121504ad1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 40ba16cf-8244-4715-b8c1-975029462ee4. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-489e0b6c-5347-4863-a244-e9f121504ad1"}]} [ 1317.152233] env[68279]: DEBUG nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1317.163733] env[68279]: DEBUG nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1317.163927] env[68279]: DEBUG nova.compute.provider_tree [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.173414] env[68279]: DEBUG nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1317.189539] env[68279]: DEBUG nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1317.247257] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b6601d-aa79-4fe1-adb6-40fb689bfcdd {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.254732] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5491a71e-d14e-43c1-893b-0f278636035c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.285266] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e086a620-3647-4067-acb9-35a3efcedee0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.291947] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b079d243-a307-40fc-a791-be84e4761c70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.304513] env[68279]: DEBUG nova.compute.provider_tree [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.873237] env[68279]: DEBUG nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1317.873237] env[68279]: DEBUG nova.compute.provider_tree [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 165 to 166 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1317.873237] env[68279]: DEBUG nova.compute.provider_tree [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1318.351976] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.366s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.373851] env[68279]: INFO nova.scheduler.client.report [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted allocations for instance ec90ec2e-f4a2-4b71-8588-d45a086d9453 [ 1318.882739] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b2085fbe-6577-4326-aa38-a38949d51718 tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "ec90ec2e-f4a2-4b71-8588-d45a086d9453" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.646s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.670055] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.670446] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.670446] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.670529] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.670692] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1319.672759] env[68279]: INFO nova.compute.manager [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Terminating instance [ 1319.917758] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.917986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.917986] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.918189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.918362] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.920530] env[68279]: INFO nova.compute.manager [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Terminating instance [ 1320.176776] env[68279]: DEBUG nova.compute.manager [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1320.176965] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1320.177889] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ae54bc-c1c2-46aa-8044-4e75c4fc1649 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.185620] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.185845] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6eac09bf-940d-4818-b618-9798421069b2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.192203] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1320.192203] env[68279]: value = "task-2964083" [ 1320.192203] env[68279]: _type = "Task" [ 1320.192203] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.199367] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.424517] env[68279]: DEBUG nova.compute.manager [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1320.424746] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1320.425741] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dbd12b-a111-4b20-9cd3-497b9cd5b45b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.433334] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.433569] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78dede72-457c-4139-8a28-dd0abd562fb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.439734] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1320.439734] env[68279]: value = "task-2964084" [ 1320.439734] env[68279]: _type = "Task" [ 1320.439734] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.447760] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.702028] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964083, 'name': PowerOffVM_Task, 'duration_secs': 0.214405} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.702371] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1320.702460] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1320.702670] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c9011e7-bd84-44aa-8af6-80aff76dc1ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.766812] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1320.767047] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1320.767204] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleting the datastore file [datastore1] e246ae0f-1679-4757-acf2-ef5239f3c36d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1320.767465] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0210fdf-5432-4355-8e0d-5f1a0b03882d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.774049] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1320.774049] env[68279]: value = "task-2964086" [ 1320.774049] env[68279]: _type = "Task" [ 1320.774049] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.781078] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964086, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.949804] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964084, 'name': PowerOffVM_Task, 'duration_secs': 0.167342} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.950092] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1320.950271] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1320.950516] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-568d3910-7b51-4821-92b2-54076242ccbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.009523] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1321.009716] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1321.009860] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleting the datastore file [datastore1] a4d4e9c0-0165-4c11-ba98-1214e70b91a3 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1321.010190] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97bd60a9-7000-4144-8294-b07cfd08ac75 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.016869] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for the task: (returnval){ [ 1321.016869] env[68279]: value = "task-2964088" [ 1321.016869] env[68279]: _type = "Task" [ 1321.016869] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.024563] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964088, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.283874] env[68279]: DEBUG oslo_vmware.api [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964086, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126792} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.284130] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.284272] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1321.284447] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1321.284622] env[68279]: INFO nova.compute.manager [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1321.284859] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1321.285068] env[68279]: DEBUG nova.compute.manager [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1321.285167] env[68279]: DEBUG nova.network.neutron [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1321.526252] env[68279]: DEBUG oslo_vmware.api [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Task: {'id': task-2964088, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143097} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.526546] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.526688] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1321.526866] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1321.527049] env[68279]: INFO nova.compute.manager [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1321.527285] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1321.527474] env[68279]: DEBUG nova.compute.manager [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1321.527567] env[68279]: DEBUG nova.network.neutron [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1321.774783] env[68279]: DEBUG nova.compute.manager [req-c18c4270-bd97-4720-909e-25ad90540d9e req-66fc3aea-0c86-4bbe-9762-c8b8e126b1fd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Received event network-vif-deleted-e9c7f070-ce09-4503-a0e5-76cfe063bc35 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1321.775032] env[68279]: INFO nova.compute.manager [req-c18c4270-bd97-4720-909e-25ad90540d9e req-66fc3aea-0c86-4bbe-9762-c8b8e126b1fd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Neutron deleted interface e9c7f070-ce09-4503-a0e5-76cfe063bc35; detaching it from the instance and deleting it from the info cache [ 1321.775157] env[68279]: DEBUG nova.network.neutron [req-c18c4270-bd97-4720-909e-25ad90540d9e req-66fc3aea-0c86-4bbe-9762-c8b8e126b1fd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.781937] env[68279]: DEBUG nova.compute.manager [req-870b77e6-f22b-4a8d-8151-ed02e3243999 req-cc86f167-343c-49f9-a243-9d505cfc1fe4 service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Received event network-vif-deleted-0f9fd296-d86e-4b84-9e0b-1037b955ee7f {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1321.781937] env[68279]: INFO nova.compute.manager [req-870b77e6-f22b-4a8d-8151-ed02e3243999 req-cc86f167-343c-49f9-a243-9d505cfc1fe4 service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Neutron deleted interface 0f9fd296-d86e-4b84-9e0b-1037b955ee7f; detaching it from the instance and deleting it from the info cache [ 1321.781937] env[68279]: DEBUG nova.network.neutron [req-870b77e6-f22b-4a8d-8151-ed02e3243999 req-cc86f167-343c-49f9-a243-9d505cfc1fe4 service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.258720] env[68279]: DEBUG nova.network.neutron [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.260032] env[68279]: DEBUG nova.network.neutron [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.277595] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-795f9076-b523-4f95-ba92-f56a8f6fd47a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.285743] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a837570-fb44-4e2c-bd5d-abc1bc88f10f 
{{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.291850] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877f5b47-9575-4dd0-bc00-168668e3e193 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.314124] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3399de-9f51-4b90-9878-efd9a4579149 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.333310] env[68279]: DEBUG nova.compute.manager [req-c18c4270-bd97-4720-909e-25ad90540d9e req-66fc3aea-0c86-4bbe-9762-c8b8e126b1fd service nova] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Detach interface failed, port_id=e9c7f070-ce09-4503-a0e5-76cfe063bc35, reason: Instance a4d4e9c0-0165-4c11-ba98-1214e70b91a3 could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1322.347310] env[68279]: DEBUG nova.compute.manager [req-870b77e6-f22b-4a8d-8151-ed02e3243999 req-cc86f167-343c-49f9-a243-9d505cfc1fe4 service nova] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Detach interface failed, port_id=0f9fd296-d86e-4b84-9e0b-1037b955ee7f, reason: Instance e246ae0f-1679-4757-acf2-ef5239f3c36d could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1322.762580] env[68279]: INFO nova.compute.manager [-] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Took 1.48 seconds to deallocate network for instance. [ 1322.762920] env[68279]: INFO nova.compute.manager [-] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Took 1.24 seconds to deallocate network for instance. 
[ 1323.272398] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.272767] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.272900] env[68279]: DEBUG nova.objects.instance [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'resources' on Instance uuid e246ae0f-1679-4757-acf2-ef5239f3c36d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.274474] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.839879] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e58471-89c9-408e-bbc2-60450753c91d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.847773] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a0e1d6-b39b-43f7-ac52-9486c19c6923 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.878934] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebf5d5c-ad3d-4b43-b120-64d706af74cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.886094] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56e3320-266d-4001-944b-10d2185f1497 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.898897] env[68279]: DEBUG nova.compute.provider_tree [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1324.301523] env[68279]: DEBUG nova.compute.manager [None req-f2194b91-2347-4807-b74a-c84637d42fe0 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1324.302483] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0921512-4de1-4532-8092-1ec4e1f68ea3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.430152] env[68279]: DEBUG nova.scheduler.client.report [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updated inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1324.430468] env[68279]: DEBUG nova.compute.provider_tree [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 generation from 166 to 167 during operation: update_inventory {{(pid=68279) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1324.430698] env[68279]: DEBUG nova.compute.provider_tree [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1324.812973] env[68279]: INFO nova.compute.manager [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] instance snapshotting [ 1324.813595] env[68279]: DEBUG nova.objects.instance [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.935604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.663s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.938552] env[68279]: DEBUG 
oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.663s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.938649] env[68279]: DEBUG nova.objects.instance [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lazy-loading 'resources' on Instance uuid a4d4e9c0-0165-4c11-ba98-1214e70b91a3 {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.952362] env[68279]: INFO nova.scheduler.client.report [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted allocations for instance e246ae0f-1679-4757-acf2-ef5239f3c36d [ 1325.319263] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fce6902-fc91-4571-96f9-6659cf6f2e6f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.338795] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbd0485-9916-43fb-8b2b-6907eec50c66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.459324] env[68279]: DEBUG oslo_concurrency.lockutils [None req-09f4bb81-b7d3-4530-aed3-c7bf7c372062 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e246ae0f-1679-4757-acf2-ef5239f3c36d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.789s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.488401] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e29ebb-8687-4af5-b0d9-931d7f6ae278 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.495636] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f820e991-6ad1-4b04-9f74-f4199027a3a6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.526688] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5a931c-115e-43ff-9ee2-6306abbecfc0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.534347] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72a0ae4-97a5-4e1c-a3eb-f3309e7710bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.547017] env[68279]: DEBUG nova.compute.provider_tree [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.848181] env[68279]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1325.848503] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2ce3c825-ec61-4c93-b70f-0b8472927e95 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.857856] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1325.857856] env[68279]: value = "task-2964089" [ 1325.857856] env[68279]: _type = "Task" [ 1325.857856] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.866779] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964089, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.050731] env[68279]: DEBUG nova.scheduler.client.report [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.168057] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.369282] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964089, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.555568] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.571503] env[68279]: INFO nova.scheduler.client.report [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Deleted allocations for instance a4d4e9c0-0165-4c11-ba98-1214e70b91a3 [ 1326.867969] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964089, 'name': CreateSnapshot_Task, 'duration_secs': 0.546496} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.868256] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1326.868961] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cec005-e04a-45f1-bb0a-6b3f86433862 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.079865] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2fd1c5f7-05e1-462c-8e2e-ac835c7419af tempest-AttachInterfacesTestJSON-1001570545 tempest-AttachInterfacesTestJSON-1001570545-project-member] Lock "a4d4e9c0-0165-4c11-ba98-1214e70b91a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.162s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.168077] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.386327] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1327.386640] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-78d5333e-de8f-4d57-9ecd-d23275840ae3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.395895] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1327.395895] env[68279]: value = 
"task-2964090" [ 1327.395895] env[68279]: _type = "Task" [ 1327.395895] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.404199] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964090, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.499561] env[68279]: DEBUG nova.compute.manager [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1327.906737] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964090, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.026669] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.027094] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.407092] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964090, 'name': CloneVM_Task, 'duration_secs': 0.830803} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.407373] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created linked-clone VM from snapshot [ 1328.408114] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e511bd8-8a06-4e5e-9516-0863599e6917 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.415386] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploading image 062f7719-401f-4f06-9170-26b49d9c8951 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1328.440339] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1328.440339] env[68279]: value = "vm-594773" [ 1328.440339] env[68279]: _type = "VirtualMachine" [ 1328.440339] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1328.440599] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ffffd4f3-9331-4718-b461-c5a71bab3646 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.447219] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease: (returnval){ [ 1328.447219] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d5b84-8a78-ec95-18d3-64580228dca5" [ 1328.447219] env[68279]: _type = "HttpNfcLease" [ 1328.447219] env[68279]: } obtained for exporting VM: (result){ [ 1328.447219] env[68279]: value = "vm-594773" [ 1328.447219] env[68279]: _type = "VirtualMachine" [ 1328.447219] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1328.447501] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the lease: (returnval){ [ 1328.447501] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d5b84-8a78-ec95-18d3-64580228dca5" [ 1328.447501] env[68279]: _type = "HttpNfcLease" [ 1328.447501] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1328.453651] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1328.453651] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d5b84-8a78-ec95-18d3-64580228dca5" [ 1328.453651] env[68279]: _type = "HttpNfcLease" [ 1328.453651] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1328.532684] env[68279]: INFO nova.compute.claims [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.956392] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1328.956392] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d5b84-8a78-ec95-18d3-64580228dca5" [ 1328.956392] env[68279]: _type = "HttpNfcLease" [ 1328.956392] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1328.956704] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1328.956704] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521d5b84-8a78-ec95-18d3-64580228dca5" [ 1328.956704] env[68279]: _type = "HttpNfcLease" [ 1328.956704] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1328.957440] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db94cfa-9be1-4f30-8ea2-c82764e2461a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.964944] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1328.965000] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1329.039417] env[68279]: INFO nova.compute.resource_tracker [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating resource usage from migration e38e8538-152c-4af9-9a94-fea61acfb43e [ 1329.058719] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-29d529c0-68c3-4833-ac20-b1efb3a0c5d7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.097763] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecede11-d5d5-4eae-9800-a71c8e833504 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.105058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d50338-7903-4509-9eae-fb3520ce06cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.136205] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8a5fad-0755-4a62-9185-cd32ce570ac3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.144318] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddc9d76-f361-4e97-ba4c-710d8e0b722c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.158289] env[68279]: DEBUG nova.compute.provider_tree [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.163544] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.168262] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.168416] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1329.168588] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.661864] env[68279]: DEBUG nova.scheduler.client.report [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1329.671752] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.167164] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.140s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.167567] env[68279]: INFO nova.compute.manager [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Migrating [ 1330.174604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.503s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.174850] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.174966] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1330.178716] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd975d5-38ff-4f00-8766-7aa5ce93172a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.192473] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7f620b-6449-4cd1-8d6a-5ff0503d7c32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.208863] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4480b9b5-634d-45f2-b4e2-4a8d0982f979 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.216497] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7ff557-ec6f-4160-940d-005d4403a4a7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.257914] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180471MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1330.258212] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.258579] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.687812] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.688177] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1330.688177] env[68279]: DEBUG nova.network.neutron [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1331.268995] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Applying migration context for instance 84b2828a-e62c-45b2-a5ee-067ca66e626b as it has an incoming, in-progress migration e38e8538-152c-4af9-9a94-fea61acfb43e. 
Migration status is pre-migrating {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1331.269754] env[68279]: INFO nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating resource usage from migration e38e8538-152c-4af9-9a94-fea61acfb43e [ 1331.288275] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 04f42241-5598-47e2-906c-998a19da434f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1331.288433] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Migration e38e8538-152c-4af9-9a94-fea61acfb43e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1331.288558] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1331.288775] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1331.288918] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1331.348059] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05ae165-c334-4e11-bdd4-56887c3c684a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.355947] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4a5f27-8b69-4759-9c8a-476823c91a36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.388684] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbff27a-678f-46de-bd06-1a9ebda29e08 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.396284] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a456654-8d3b-4f07-8654-45a607f54317 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.409580] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 
40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.451727] env[68279]: DEBUG nova.network.neutron [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.913991] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1331.955141] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.419243] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1332.419454] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.161s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.469063] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d983fda-1094-4bc5-8be0-56ada5243c0b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.488738] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1333.995424] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.995765] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5f2902b-d049-4f7a-bd03-cb0753f3642b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.003682] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1334.003682] env[68279]: value = "task-2964092" [ 1334.003682] env[68279]: _type = "Task" [ 1334.003682] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.011798] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.515112] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964092, 'name': PowerOffVM_Task, 'duration_secs': 0.325144} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.515582] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.515647] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1335.023399] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1335.023711] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.023825] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1335.024056] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.024173] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1335.024324] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1335.024534] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 
tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1335.024695] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1335.024865] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1335.025050] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1335.025234] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1335.030485] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7f2d4d9-e943-4f6c-bb63-f98055dc9157 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.047539] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1335.047539] env[68279]: value = "task-2964093" [ 1335.047539] env[68279]: _type = "Task" [ 1335.047539] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.056093] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.561411] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964093, 'name': ReconfigVM_Task, 'duration_secs': 0.204925} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.561836] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1336.070520] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1336.070830] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1336.070939] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1336.071164] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1336.071314] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1336.071461] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1336.071669] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1336.071829] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa 
tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1336.071997] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1336.072180] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1336.072357] env[68279]: DEBUG nova.virt.hardware [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1336.077716] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1336.078027] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d71a214f-41e8-44b2-86a7-a24b89e4d39a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.097134] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1336.097134] env[68279]: value = "task-2964094" [ 1336.097134] env[68279]: _type = "Task" [ 1336.097134] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.105229] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964094, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.420536] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.420794] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.420969] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.607375] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964094, 'name': ReconfigVM_Task, 'duration_secs': 0.20337} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.607659] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1336.608494] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dde1cd-138d-42d3-b025-f96a8b64697b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.632424] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1336.633065] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ef7420-9616-4849-87f8-7c43c299387f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.651251] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1336.651251] env[68279]: value = "task-2964095" [ 1336.651251] env[68279]: _type = "Task" [ 1336.651251] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.658988] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964095, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.160897] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964095, 'name': ReconfigVM_Task, 'duration_secs': 0.319113} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.161204] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b/84b2828a-e62c-45b2-a5ee-067ca66e626b.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1337.161478] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1337.667943] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3a7f44-5c02-4a0e-a6f3-06bda1ac87fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.687214] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140d228b-59f2-408d-be14-913441d27a8d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.705167] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1337.769312] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1337.770275] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d170f327-dcf9-44e1-b06f-3ef67a35e82a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.776974] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1337.777178] env[68279]: ERROR oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk due to incomplete transfer. [ 1337.777403] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1b0f56cc-6c99-450f-837c-8c8d3bea7dab {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.784194] env[68279]: DEBUG oslo_vmware.rw_handles [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52113f28-6221-23cd-6144-0c700a698505/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1337.784398] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploaded image 062f7719-401f-4f06-9170-26b49d9c8951 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1337.786763] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1337.786996] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-814733e4-926e-4e8e-b063-03180903a82f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.793185] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1337.793185] env[68279]: value = "task-2964096" [ 1337.793185] env[68279]: _type = "Task" [ 1337.793185] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.800242] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964096, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.242898] env[68279]: DEBUG nova.network.neutron [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Port 80e209dd-e4b0-4331-87a6-92e23bdfa270 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1338.302853] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964096, 'name': Destroy_Task, 'duration_secs': 0.310602} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.303067] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroyed the VM [ 1338.303303] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1338.303549] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c1744653-a211-48b1-a07d-0d168d461521 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.311235] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1338.311235] env[68279]: value = "task-2964097" [ 1338.311235] env[68279]: _type = "Task" [ 1338.311235] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.318658] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964097, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.820476] env[68279]: DEBUG oslo_vmware.api [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964097, 'name': RemoveSnapshot_Task, 'duration_secs': 0.489754} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.820933] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1338.820990] env[68279]: INFO nova.compute.manager [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 13.50 seconds to snapshot the instance on the hypervisor. [ 1339.264968] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.265210] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.265383] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.374424] env[68279]: DEBUG nova.compute.manager [None req-f2194b91-2347-4807-b74a-c84637d42fe0 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Found 1 images (rotation: 2) {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1340.155934] env[68279]: DEBUG nova.compute.manager [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1340.156897] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf609a1a-57bd-4547-8a8c-1894b58a6cd0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.299408] env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.299597] env[68279]: 
DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1340.299773] env[68279]: DEBUG nova.network.neutron [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.667863] env[68279]: INFO nova.compute.manager [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] instance snapshotting [ 1340.668524] env[68279]: DEBUG nova.objects.instance [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.007915] env[68279]: DEBUG nova.network.neutron [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.174321] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a6a180-2321-444c-9960-5661562d65ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.194225] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04fdc74-a77d-4234-83f3-2c7cacf06e97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.510492] 
env[68279]: DEBUG oslo_concurrency.lockutils [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1341.704443] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1341.704751] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9aa2bcc1-bcf3-40ea-93f9-81f175866bd8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.712248] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1341.712248] env[68279]: value = "task-2964098" [ 1341.712248] env[68279]: _type = "Task" [ 1341.712248] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.719758] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964098, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.035184] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab51e3f-0401-4b80-a948-93708631ccf2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.054240] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d912107b-074e-4bbe-83ff-83d26c8910e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.060907] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1342.222205] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964098, 'name': CreateSnapshot_Task, 'duration_secs': 0.407909} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.222531] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1342.223215] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8210a6-820e-49ef-8743-9a32e01288d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.566891] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.567167] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a688abf-bc25-42b2-bf1f-01ba5f33fd65 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.574913] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1342.574913] env[68279]: value = "task-2964099" [ 1342.574913] env[68279]: _type = "Task" [ 1342.574913] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.582569] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964099, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.739883] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1342.740228] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-83990785-0b1e-4ae8-9547-804e960e67ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.748539] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1342.748539] env[68279]: value = "task-2964100" [ 1342.748539] env[68279]: _type = "Task" [ 1342.748539] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.756185] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964100, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.087627] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964099, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.258644] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964100, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.587251] env[68279]: DEBUG oslo_vmware.api [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964099, 'name': PowerOnVM_Task, 'duration_secs': 0.51428} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.587486] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.587631] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-687e8ff6-ec1c-4a83-9c32-a2329c4000aa tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance '84b2828a-e62c-45b2-a5ee-067ca66e626b' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1343.759702] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964100, 'name': CloneVM_Task, 'duration_secs': 0.851911} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.759957] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created linked-clone VM from snapshot [ 1343.760677] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c435d79-8113-4f48-b3cb-9f729e48d555 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.767879] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploading image a77f8212-27ea-46fe-8b0e-021389e2d26e {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1343.786946] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1343.786946] env[68279]: value = "vm-594775" [ 1343.786946] env[68279]: _type = "VirtualMachine" [ 1343.786946] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1343.787205] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-41cb43e5-be30-426c-9d44-2e81e552b31f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.793338] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease: (returnval){ [ 1343.793338] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521849cb-c86d-c850-ee43-007ca064f1bb" [ 1343.793338] env[68279]: _type = "HttpNfcLease" [ 1343.793338] env[68279]: } obtained for exporting VM: (result){ [ 1343.793338] env[68279]: value = "vm-594775" [ 1343.793338] env[68279]: _type = "VirtualMachine" [ 1343.793338] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1343.793619] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the lease: (returnval){ [ 1343.793619] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521849cb-c86d-c850-ee43-007ca064f1bb" [ 1343.793619] env[68279]: _type = "HttpNfcLease" [ 1343.793619] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1343.799353] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1343.799353] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521849cb-c86d-c850-ee43-007ca064f1bb" [ 1343.799353] env[68279]: _type = "HttpNfcLease" [ 1343.799353] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1344.302015] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1344.302015] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521849cb-c86d-c850-ee43-007ca064f1bb" [ 1344.302015] env[68279]: _type = "HttpNfcLease" [ 1344.302015] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1344.302493] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1344.302493] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521849cb-c86d-c850-ee43-007ca064f1bb" [ 1344.302493] env[68279]: _type = "HttpNfcLease" [ 1344.302493] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1344.303023] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49249770-a13e-417f-bed9-264fd7fdd2fc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.309735] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1344.309906] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1344.419897] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-330715b0-e61d-4581-97d1-dcad569f8084 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.880868] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.882678] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.882678] env[68279]: DEBUG nova.compute.manager [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Going to confirm migration 7 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1346.451595] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.451784] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1346.451974] env[68279]: DEBUG nova.network.neutron [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.452196] env[68279]: DEBUG nova.objects.instance [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'info_cache' on Instance uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.651560] env[68279]: DEBUG nova.network.neutron [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [{"id": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "address": "fa:16:3e:58:6b:db", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": 
"br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e209dd-e4", "ovs_interfaceid": "80e209dd-e4b0-4331-87a6-92e23bdfa270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.154257] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-84b2828a-e62c-45b2-a5ee-067ca66e626b" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.154569] env[68279]: DEBUG nova.objects.instance [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'migration_context' on Instance uuid 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1348.658126] env[68279]: DEBUG nova.objects.base [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Object Instance<84b2828a-e62c-45b2-a5ee-067ca66e626b> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1348.659112] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cece0aef-958c-4738-b52d-c2599df8e442 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.680505] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc44250-8fc4-49c8-9b0a-c174697f4f6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.686721] env[68279]: DEBUG oslo_vmware.api [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1348.686721] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52586646-1202-f860-122c-47395f1b1c10" [ 1348.686721] env[68279]: _type = "Task" [ 1348.686721] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.694761] env[68279]: DEBUG oslo_vmware.api [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52586646-1202-f860-122c-47395f1b1c10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.197090] env[68279]: DEBUG oslo_vmware.api [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52586646-1202-f860-122c-47395f1b1c10, 'name': SearchDatastore_Task, 'duration_secs': 0.008162} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.197397] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.197644] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.754566] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573bda8a-fe62-44e7-bb1d-1a7eba6fd8cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.762932] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147ea6a2-c0c3-47d1-a1b2-f3a8fcc84c46 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.794440] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02d2179-8b08-4341-9697-71d2f16581a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.801692] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70469d8e-1c45-4498-9c94-42205b658e32 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.815303] env[68279]: DEBUG nova.compute.provider_tree [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.319365] env[68279]: DEBUG nova.scheduler.client.report [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has 
not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1351.330098] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.132s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.888611] env[68279]: INFO nova.scheduler.client.report [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted allocation for migration e38e8538-152c-4af9-9a94-fea61acfb43e [ 1352.393852] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6dbba631-458d-4038-933e-60cdee7776cb tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.512s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.838298] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.838543] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.838751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.838931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.839115] env[68279]: 
DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.841217] env[68279]: INFO nova.compute.manager [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Terminating instance [ 1353.046188] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1353.047225] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089afd62-631b-4651-bbfb-b55c3ae00aa8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.053509] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1353.053670] env[68279]: ERROR oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk due to incomplete transfer. [ 1353.053870] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f31c69b2-7f61-4e83-9031-7054c022e385 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.061653] env[68279]: DEBUG oslo_vmware.rw_handles [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ea407b-5329-eac2-7efd-f6136d10de63/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1353.061842] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploaded image a77f8212-27ea-46fe-8b0e-021389e2d26e to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1353.064090] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1353.064317] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a567811e-d1c9-4b67-a2ec-0950c26923ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.070894] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1353.070894] env[68279]: value = "task-2964102" [ 1353.070894] env[68279]: _type = "Task" [ 1353.070894] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.078426] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964102, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.345141] env[68279]: DEBUG nova.compute.manager [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1353.345335] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1353.346316] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2280ac15-7872-4f72-b674-c3a72a9386d4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.353849] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.354153] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e84831b4-f94c-4394-bf02-d46646357715 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.360014] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1353.360014] env[68279]: value = "task-2964103" [ 1353.360014] env[68279]: _type = "Task" [ 1353.360014] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.368933] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.580851] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964102, 'name': Destroy_Task, 'duration_secs': 0.307355} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.581272] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroyed the VM [ 1353.581355] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1353.581604] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b2be11d9-b1ea-4547-875e-c10e974ce049 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.587624] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1353.587624] env[68279]: value = "task-2964104" [ 1353.587624] env[68279]: _type = "Task" [ 1353.587624] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.595347] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964104, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.868948] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964103, 'name': PowerOffVM_Task, 'duration_secs': 0.249182} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.869190] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1353.869362] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1353.869600] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fd22c76-e7ab-4309-a852-84d29f1ed9a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.930999] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1353.931249] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1353.931413] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleting the datastore file [datastore2] 84b2828a-e62c-45b2-a5ee-067ca66e626b {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.931701] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc649264-29a2-4b10-b120-c2f8518c613c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.937929] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1353.937929] env[68279]: value = "task-2964106" [ 1353.937929] env[68279]: _type = "Task" [ 1353.937929] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.945481] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964106, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.098180] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964104, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.447299] env[68279]: DEBUG oslo_vmware.api [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231246} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.447556] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.447745] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1354.447926] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1354.448118] env[68279]: INFO nova.compute.manager [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1354.448367] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1354.448561] env[68279]: DEBUG nova.compute.manager [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1354.448660] env[68279]: DEBUG nova.network.neutron [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1354.599212] env[68279]: DEBUG oslo_vmware.api [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964104, 'name': RemoveSnapshot_Task, 'duration_secs': 0.554932} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.599578] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1354.599713] env[68279]: INFO nova.compute.manager [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 13.43 seconds to snapshot the instance on the hypervisor. [ 1354.857083] env[68279]: DEBUG nova.compute.manager [req-64639a0e-7ef5-486e-b316-4dbb49012fb6 req-78d9808d-40ba-45e6-94a1-6e92a8cc2d7a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Received event network-vif-deleted-80e209dd-e4b0-4331-87a6-92e23bdfa270 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1354.857318] env[68279]: INFO nova.compute.manager [req-64639a0e-7ef5-486e-b316-4dbb49012fb6 req-78d9808d-40ba-45e6-94a1-6e92a8cc2d7a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Neutron deleted interface 80e209dd-e4b0-4331-87a6-92e23bdfa270; detaching it from the instance and deleting it from the info cache [ 1354.857492] env[68279]: DEBUG nova.network.neutron [req-64639a0e-7ef5-486e-b316-4dbb49012fb6 req-78d9808d-40ba-45e6-94a1-6e92a8cc2d7a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.144360] env[68279]: DEBUG nova.compute.manager [None req-8c645494-1971-4a3a-8aeb-6b35074de5e4 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Found 2 images (rotation: 2) {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1355.341498] env[68279]: DEBUG nova.network.neutron [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.360110] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4931e0e9-33f2-43d7-9022-9b4fd9228f4e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.370638] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9665501-83be-4303-822d-25c6fcc1b813 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.397563] env[68279]: DEBUG nova.compute.manager [req-64639a0e-7ef5-486e-b316-4dbb49012fb6 req-78d9808d-40ba-45e6-94a1-6e92a8cc2d7a service nova] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Detach interface failed, port_id=80e209dd-e4b0-4331-87a6-92e23bdfa270, reason: Instance 84b2828a-e62c-45b2-a5ee-067ca66e626b could not be found. 
{{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1355.844422] env[68279]: INFO nova.compute.manager [-] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Took 1.40 seconds to deallocate network for instance. [ 1356.009329] env[68279]: DEBUG nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.010253] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ee707d-957a-4176-8109-1802146439ad {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.350756] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.351039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.351243] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.372660] env[68279]: INFO nova.scheduler.client.report [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted allocations for instance 84b2828a-e62c-45b2-a5ee-067ca66e626b [ 1356.520474] env[68279]: INFO nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] instance snapshotting [ 1356.521099] env[68279]: DEBUG nova.objects.instance [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.879873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-86d96c29-53ff-4bfa-876b-da17d6ef4a43 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "84b2828a-e62c-45b2-a5ee-067ca66e626b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.041s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.026745] env[68279]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8198f707-0860-4d3b-8d42-ca7043ae727c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.046163] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a12abcb-7b17-4a91-a53c-d95f022ab898 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.557712] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1357.557712] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8c107988-5adb-492c-b216-edad859fb766 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.564634] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1357.564634] env[68279]: value = "task-2964107" [ 1357.564634] env[68279]: _type = "Task" [ 1357.564634] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.572729] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964107, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.675570] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.675851] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.074515] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964107, 'name': CreateSnapshot_Task, 'duration_secs': 0.384166} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.074873] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1358.075498] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1644a243-313c-410f-99ae-e5bb8b74e4f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.178569] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1358.592237] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1358.592538] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-472f7608-b339-4bea-9e82-935c0dbfecfc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.601461] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1358.601461] env[68279]: value = "task-2964108" [ 1358.601461] env[68279]: _type = "Task" [ 1358.601461] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.609177] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964108, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.700313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.700624] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.702252] env[68279]: INFO nova.compute.claims [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1359.112561] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964108, 'name': CloneVM_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.612222] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964108, 'name': CloneVM_Task, 'duration_secs': 0.880625} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.612470] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Created linked-clone VM from snapshot [ 1359.613160] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c178bcf-e907-42ce-9b06-765e6b50933f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.620135] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploading image 73390f89-3784-48c7-8e2f-b6905ebc22a6 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1359.645788] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1359.645788] env[68279]: value = "vm-594777" [ 1359.645788] env[68279]: _type = "VirtualMachine" [ 1359.645788] env[68279]: }. 
{{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1359.646068] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d570c39d-52f6-4467-b6ef-a83d35c3f20c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.652550] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease: (returnval){ [ 1359.652550] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7fef7-c077-f1b2-3d9a-806e9c77e8d6" [ 1359.652550] env[68279]: _type = "HttpNfcLease" [ 1359.652550] env[68279]: } obtained for exporting VM: (result){ [ 1359.652550] env[68279]: value = "vm-594777" [ 1359.652550] env[68279]: _type = "VirtualMachine" [ 1359.652550] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1359.652811] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the lease: (returnval){ [ 1359.652811] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7fef7-c077-f1b2-3d9a-806e9c77e8d6" [ 1359.652811] env[68279]: _type = "HttpNfcLease" [ 1359.652811] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1359.658395] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1359.658395] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7fef7-c077-f1b2-3d9a-806e9c77e8d6" [ 1359.658395] env[68279]: _type = "HttpNfcLease" [ 1359.658395] env[68279]: } is initializing. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1359.746050] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65d8109-3ab6-416d-b619-2cc7dac5d87d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.752371] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caff7a01-f3a3-4f52-99db-f413e766ced7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.782577] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b12a640-06fe-457d-ae17-1804261f6a9b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.789425] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096ffc0e-5e1d-476c-a1b8-f7cdab7adce4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.802120] env[68279]: DEBUG nova.compute.provider_tree [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.161010] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1360.161010] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7fef7-c077-f1b2-3d9a-806e9c77e8d6" [ 1360.161010] env[68279]: _type = "HttpNfcLease" [ 1360.161010] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1360.161529] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1360.161529] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52b7fef7-c077-f1b2-3d9a-806e9c77e8d6" [ 1360.161529] env[68279]: _type = "HttpNfcLease" [ 1360.161529] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1360.162011] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6de19e1-db5d-49a4-99c8-b04fcf642e0a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.168871] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk from lease info. 
{{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1360.169052] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk for reading. {{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1360.255499] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a112e606-75c1-4fe6-93ea-e1d0b7ebab23 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.305146] env[68279]: DEBUG nova.scheduler.client.report [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1360.811056] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.811383] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1361.317370] env[68279]: DEBUG nova.compute.utils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1361.319453] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1361.319662] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1361.358295] env[68279]: DEBUG nova.policy [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e9d9c1927948f5bb8f42235b09f008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d7a90a39b864e3e985b3b828c3fd363', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1361.626190] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Successfully created port: 64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1361.820178] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1362.832053] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1362.858172] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1362.858480] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1362.858658] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1362.858914] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1362.859025] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1362.859138] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1362.859365] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1362.859726] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1362.859811] env[68279]: DEBUG nova.virt.hardware [None 
req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1362.859981] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1362.860238] env[68279]: DEBUG nova.virt.hardware [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1362.861203] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4ef700-7e63-42ad-8c02-2b2a08455b93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.870048] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f597279a-55e6-4852-a76c-a89b693acf7f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.984987] env[68279]: DEBUG nova.compute.manager [req-0b704df0-5a03-4891-99d3-b47cd148c988 req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Received event network-vif-plugged-64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1362.984987] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b704df0-5a03-4891-99d3-b47cd148c988 req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] Acquiring lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.984987] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b704df0-5a03-4891-99d3-b47cd148c988 req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.984987] env[68279]: DEBUG oslo_concurrency.lockutils [req-0b704df0-5a03-4891-99d3-b47cd148c988 req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.984987] env[68279]: DEBUG nova.compute.manager [req-0b704df0-5a03-4891-99d3-b47cd148c988 req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] No waiting events found dispatching network-vif-plugged-64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1362.984987] env[68279]: WARNING nova.compute.manager [req-0b704df0-5a03-4891-99d3-b47cd148c988 
req-e01750da-0e9e-42f6-9fef-95a25ded5694 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Received unexpected event network-vif-plugged-64d7b0c3-6342-46df-b3f7-dd7387e97727 for instance with vm_state building and task_state spawning. [ 1363.069884] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Successfully updated port: 64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1363.574420] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.574420] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1363.574420] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.108381] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1364.239591] env[68279]: DEBUG nova.network.neutron [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.742914] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1364.743258] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Instance network_info: |[{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1364.743696] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:98:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64d7b0c3-6342-46df-b3f7-dd7387e97727', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1364.751268] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1364.751484] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1364.751715] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-679e468f-82ed-45b5-ae29-0b3823b0d5f4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.771785] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1364.771785] env[68279]: value = "task-2964110" [ 1364.771785] env[68279]: _type = "Task" [ 1364.771785] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.779698] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964110, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.010693] env[68279]: DEBUG nova.compute.manager [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Received event network-changed-64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1365.010857] env[68279]: DEBUG nova.compute.manager [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Refreshing instance network info cache due to event network-changed-64d7b0c3-6342-46df-b3f7-dd7387e97727. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1365.011109] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.011269] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.011438] env[68279]: DEBUG nova.network.neutron [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Refreshing network info cache for port 64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1365.282425] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964110, 'name': CreateVM_Task, 'duration_secs': 0.339514} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.282801] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1365.283341] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.283536] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.283930] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1365.284219] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-197ae706-473a-4ec8-a6b7-becb6345b9ea {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.289241] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1365.289241] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52853b63-92b1-36b4-3c6a-8539c8faec92" [ 1365.289241] env[68279]: _type = "Task" [ 1365.289241] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.297260] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52853b63-92b1-36b4-3c6a-8539c8faec92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.708793] env[68279]: DEBUG nova.network.neutron [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updated VIF entry in instance network info cache for port 64d7b0c3-6342-46df-b3f7-dd7387e97727. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.709235] env[68279]: DEBUG nova.network.neutron [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.801913] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52853b63-92b1-36b4-3c6a-8539c8faec92, 'name': SearchDatastore_Task, 'duration_secs': 0.015955} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.802242] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.802497] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1365.802771] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.802945] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.803176] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1365.803466] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0de0ded8-d5f9-49f0-820c-9408a66ddf55 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.812168] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1365.812332] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1365.813034] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d108ff-62bc-49e0-ab59-ab546cffa9e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.818016] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1365.818016] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c2ac9-c427-f513-2146-28ec0601d78f" [ 1365.818016] env[68279]: _type = "Task" [ 1365.818016] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.825457] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c2ac9-c427-f513-2146-28ec0601d78f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.211783] env[68279]: DEBUG oslo_concurrency.lockutils [req-4d239266-5045-491b-a008-31d292b756a5 req-caf41a2c-20d3-47f2-bdf9-bb1a2ffe657d service nova] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.328720] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]525c2ac9-c427-f513-2146-28ec0601d78f, 'name': SearchDatastore_Task, 'duration_secs': 0.012127} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.329499] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49f3f734-e85a-40cc-a8d3-117f5a53bdf2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.334466] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1366.334466] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276a991-e092-3f77-f06f-4999f4012079" [ 1366.334466] env[68279]: _type = "Task" [ 1366.334466] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.342148] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276a991-e092-3f77-f06f-4999f4012079, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.844819] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5276a991-e092-3f77-f06f-4999f4012079, 'name': SearchDatastore_Task, 'duration_secs': 0.012876} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.845096] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.845357] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1366.845627] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4261b4e8-84d2-41db-8e8c-e6cb9c8182cb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.855240] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1366.855240] env[68279]: value = "task-2964111" [ 1366.855240] env[68279]: _type = "Task" [ 1366.855240] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.863306] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.367453] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964111, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.650163] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1367.651077] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756b13a6-57a3-44e0-8653-b7145dfadd8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.657205] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1367.657369] env[68279]: ERROR oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk due to incomplete transfer. [ 1367.657570] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bca36b73-05ab-4bb9-8476-cd76ff01fa92 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.663759] env[68279]: DEBUG oslo_vmware.rw_handles [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d50d78-2754-1af7-ef6e-6129f1bdc060/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1367.663960] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Uploaded image 73390f89-3784-48c7-8e2f-b6905ebc22a6 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1367.666333] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1367.666554] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ed6e6902-8656-4314-9516-ad34acb643ac {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.671685] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1367.671685] env[68279]: value = "task-2964112" [ 1367.671685] env[68279]: _type = "Task" [ 1367.671685] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.679932] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964112, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.866603] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566413} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.866858] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1367.867131] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1367.867374] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca125b53-356a-496a-9095-f83f23d295eb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.874504] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1367.874504] env[68279]: value = "task-2964113" [ 1367.874504] env[68279]: _type = "Task" [ 1367.874504] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.882111] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.181168] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964112, 'name': Destroy_Task, 'duration_secs': 0.352066} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.181414] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroyed the VM [ 1368.181646] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1368.181889] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-edfc3d03-b6e6-48b4-9191-62d1a43610df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.188050] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1368.188050] env[68279]: value = "task-2964114" [ 1368.188050] env[68279]: _type = "Task" [ 1368.188050] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.195540] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964114, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.383938] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062587} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.384376] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1368.384951] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dda5eee-90b6-4aed-880c-5e5c892af6b5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.407192] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1368.407421] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a7ef376-a238-44ce-ac5a-4a621f683c1e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.427241] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1368.427241] env[68279]: value = "task-2964115" [ 1368.427241] env[68279]: _type = "Task" [ 1368.427241] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.439669] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.698436] env[68279]: DEBUG oslo_vmware.api [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964114, 'name': RemoveSnapshot_Task, 'duration_secs': 0.457827} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.698660] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1368.698963] env[68279]: INFO nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 11.67 seconds to snapshot the instance on the hypervisor. 
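[editor's note] The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and RemoveSnapshot_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter *_Task method, then blocks in wait_for_task while _poll_task reads the task's progress until it completes. The sketch below only mirrors that control flow; get_task_info is a hypothetical stand-in for the PropertyCollector read oslo.vmware actually performs, and the interval is an assumption based on the ~0.5 s polling cadence visible in the timestamps.

# Minimal sketch of the invoke-then-poll pattern seen for task-2964111..task-2964114.
# `get_task_info` is a hypothetical callable returning a dict with 'state' and 'progress';
# it stands in for the PropertyCollector lookup done by oslo_vmware.api.
import time

def wait_for_vcenter_task(get_task_info, task_ref, poll_interval=0.5):
    """Block until the vCenter task reports success, raising on error."""
    while True:
        info = get_task_info(task_ref)        # e.g. {'state': 'running', 'progress': 77}
        if info['state'] == 'success':
            return info                       # CopyVirtualDisk_Task: 0% -> 77% -> done
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'vCenter task failed'))
        time.sleep(poll_interval)             # log shows roughly half-second polling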
[ 1368.937874] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964115, 'name': ReconfigVM_Task, 'duration_secs': 0.26066} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.937874] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfigured VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1368.938476] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92243074-45f3-401b-97e1-7431a6cc0fec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.944612] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1368.944612] env[68279]: value = "task-2964116" [ 1368.944612] env[68279]: _type = "Task" [ 1368.944612] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.952741] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964116, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.240357] env[68279]: DEBUG nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Found 3 images (rotation: 2) {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1369.240555] env[68279]: DEBUG nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Rotating out 1 backups {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1369.240718] env[68279]: DEBUG nova.compute.manager [None req-c06100bf-da36-4c8a-a39e-4709d1bd84da tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleting image 062f7719-401f-4f06-9170-26b49d9c8951 {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1369.454089] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964116, 'name': Rename_Task} progress is 14%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.954824] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964116, 'name': Rename_Task, 'duration_secs': 0.824921} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.955108] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1369.955362] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f47f9efd-ed53-4e9d-96f2-3be1701872f1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.961607] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1369.961607] env[68279]: value = "task-2964117" [ 1369.961607] env[68279]: _type = "Task" [ 1369.961607] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.969918] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.471831] env[68279]: DEBUG oslo_vmware.api [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964117, 'name': PowerOnVM_Task, 'duration_secs': 0.46625} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.472147] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1370.472375] env[68279]: INFO nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Took 7.64 seconds to spawn the instance on the hypervisor. 
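[editor's note] The recurring "Acquiring lock ... by ...", "acquired ... waited Xs" and ""released" ... held Ys" messages throughout this section come from oslo_concurrency.lockutils, which serializes work on a named per-resource lock (image cache paths, refresh_cache-<uuid>, compute_resources) and reports how long callers waited and held it. The following is a standard-library approximation of that bookkeeping, not the oslo.concurrency implementation; the names and print format are illustrative only.

# Rough sketch of named-lock bookkeeping in the style of the lockutils messages above.
import threading
import time
from contextlib import contextmanager

_locks = {}                                   # name -> threading.Lock, one per resource
_registry_guard = threading.Lock()

@contextmanager
def synchronized(name, caller):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()                            # "Acquiring lock ... by ..."
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Usage in the spirit of the log, e.g. guarding an instance build:
# with synchronized("e6ea9225-fdae-4ba2-859d-f0293e10e0bf", "_locked_do_build_and_run_instance"):
#     ...  # spawn the instance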
[ 1370.472553] env[68279]: DEBUG nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1370.473369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ba7cba-13fc-4b10-a7bf-2625dcf2eb7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.992235] env[68279]: INFO nova.compute.manager [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Took 12.31 seconds to build instance. [ 1371.026880] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.027155] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.027350] env[68279]: DEBUG nova.compute.manager [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1371.028502] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41a86d4-11a7-4058-890d-0ada740bd304 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.035564] env[68279]: DEBUG nova.compute.manager [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1371.036153] env[68279]: DEBUG nova.objects.instance [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.494243] env[68279]: DEBUG oslo_concurrency.lockutils [None req-97a2fe5b-3feb-41af-8724-696b7d7996ef tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.818s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.728306] env[68279]: DEBUG nova.compute.manager [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Received event network-changed-64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1371.728480] env[68279]: DEBUG nova.compute.manager [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Refreshing instance network info cache due to event network-changed-64d7b0c3-6342-46df-b3f7-dd7387e97727. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1371.729171] env[68279]: DEBUG oslo_concurrency.lockutils [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.729171] env[68279]: DEBUG oslo_concurrency.lockutils [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.729171] env[68279]: DEBUG nova.network.neutron [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Refreshing network info cache for port 64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.044084] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1372.044393] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-199983df-9bcd-4915-b1a6-3b389bdad475 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.052246] env[68279]: DEBUG oslo_vmware.api [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1372.052246] env[68279]: value = "task-2964118" [ 1372.052246] env[68279]: _type = "Task" [ 1372.052246] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.061312] env[68279]: DEBUG oslo_vmware.api [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.419742] env[68279]: DEBUG nova.network.neutron [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updated VIF entry in instance network info cache for port 64d7b0c3-6342-46df-b3f7-dd7387e97727. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.420112] env[68279]: DEBUG nova.network.neutron [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.561909] env[68279]: DEBUG oslo_vmware.api [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964118, 'name': PowerOffVM_Task, 'duration_secs': 0.153322} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.562286] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1372.562335] env[68279]: DEBUG nova.compute.manager [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1372.563070] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db9dadd-957b-4806-b4f3-86539bfcbee1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.922415] env[68279]: DEBUG oslo_concurrency.lockutils [req-73477371-b36d-4a1c-b797-2c3115389520 req-89bde48a-2f01-4edf-b242-349e72c66560 service nova] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.074626] env[68279]: DEBUG oslo_concurrency.lockutils [None req-eae69789-757c-4b6e-bc8d-627d76a8e821 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.313769] env[68279]: DEBUG nova.compute.manager [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Stashing vm_state: stopped {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1374.832359] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.832359] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1375.337523] env[68279]: INFO nova.compute.claims [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1375.843550] env[68279]: INFO nova.compute.resource_tracker [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating resource usage from migration 5ac7b468-c1e5-4bd3-a71b-88b26e1c6c2f [ 1375.888930] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae732046-5539-411f-8fda-28bb061253a0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.896603] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4704a3-aea2-4fe6-903e-2b5e1e2e6ec9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.925159] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4d1715-02f7-4dd4-ab3a-c021fc369a40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.931812] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c00a5b-cc5c-4795-80fb-0023ad2afefb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.944359] env[68279]: DEBUG nova.compute.provider_tree [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.448096] env[68279]: DEBUG nova.scheduler.client.report [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1376.953334] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.121s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.953560] env[68279]: INFO nova.compute.manager [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Migrating [ 1377.468225] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.468685] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1377.468685] env[68279]: DEBUG nova.network.neutron [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1378.177953] env[68279]: DEBUG nova.network.neutron [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.680679] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1380.195750] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deabdbd6-3b92-4fcc-8d77-13cc2a1f4cb9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.214434] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1380.720941] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-662582e7-84ab-4b85-981e-08174ba8b5ab 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.721279] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26d419d4-2360-41be-add8-96df2fad4c1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.732331] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1380.732331] env[68279]: value = "task-2964119" [ 1380.732331] env[68279]: _type = "Task" [ 1380.732331] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.741669] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.243142] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1381.243523] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1381.750418] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1381.750669] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1381.750821] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1381.751013] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1381.751167] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1381.751314] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1381.751524] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1381.751707] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1381.751878] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1381.752055] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1381.752237] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1381.757275] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35c92686-43e7-42f1-a00e-bf1ca4071e78 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.773678] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1381.773678] env[68279]: value = "task-2964120" [ 1381.773678] env[68279]: _type = "Task" [ 1381.773678] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.781676] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964120, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.284779] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964120, 'name': ReconfigVM_Task, 'duration_secs': 0.145848} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.285225] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1382.791084] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1382.791424] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.791499] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.791673] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.791819] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.791963] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1382.792178] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1382.792338] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1382.792503] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1382.792662] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1382.792832] env[68279]: DEBUG nova.virt.hardware [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1382.798066] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1382.798368] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc3c055b-de55-4207-8c43-693f8e21ba80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.817274] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1382.817274] env[68279]: value = "task-2964121" [ 1382.817274] env[68279]: _type = "Task" [ 1382.817274] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.824870] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964121, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.327180] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964121, 'name': ReconfigVM_Task, 'duration_secs': 0.150016} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.327561] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1383.328182] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1102d77-cb24-462d-86d6-3236dbabda1f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.349863] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.350407] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64f7d8d-22fd-44d6-818b-66e67a623e66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.367896] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1383.367896] env[68279]: value = "task-2964122" [ 1383.367896] env[68279]: _type = "Task" [ 1383.367896] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.374979] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964122, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.877061] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.378172] env[68279]: DEBUG oslo_vmware.api [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964122, 'name': ReconfigVM_Task, 'duration_secs': 0.80283} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.378606] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 04f42241-5598-47e2-906c-998a19da434f/04f42241-5598-47e2-906c-998a19da434f.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.378709] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1384.885257] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00da6989-f7f3-477a-b325-abfa87d07387 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.904781] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0ba9db-6655-4712-865a-f95472f4ba28 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.922221] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1385.459248] env[68279]: DEBUG nova.network.neutron [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Port a2750b33-4e6b-438f-8a02-3c67fc83b7d7 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1386.482133] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1386.482458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1386.482545] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.516078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.516335] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1387.516436] env[68279]: DEBUG nova.network.neutron [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.168214] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.206484] env[68279]: DEBUG nova.network.neutron [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.709789] env[68279]: DEBUG oslo_concurrency.lockutils [None req-662582e7-84ab-4b85-981e-08174ba8b5ab 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1389.168553] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1389.168801] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1389.168947] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1389.234111] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0f6261-d02e-4708-911a-6c414c282054 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.253208] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddffb83a-2d3d-4533-9b1f-0cfb04c05eb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.260087] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1389.766733] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-662582e7-84ab-4b85-981e-08174ba8b5ab tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance '04f42241-5598-47e2-906c-998a19da434f' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1390.164695] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.169021] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.671039] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.671313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.671485] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.671642] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1390.672555] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93456a0-9cdf-4e48-8629-0edd8a51e3e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.680717] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ba0829-af5b-4334-bda4-d53b865275b7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.694180] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad5c243-7d2b-4301-b5a9-3d8c367df021 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.700132] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c20e16-42c1-4a0b-92ab-2b9f69274c35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.728075] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180447MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1390.728216] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.728423] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.735581] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Applying migration context for instance 04f42241-5598-47e2-906c-998a19da434f as it has an incoming, in-progress migration 5ac7b468-c1e5-4bd3-a71b-88b26e1c6c2f. 
Migration status is finished {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1391.736237] env[68279]: INFO nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating resource usage from migration 5ac7b468-c1e5-4bd3-a71b-88b26e1c6c2f [ 1391.752172] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance e6ea9225-fdae-4ba2-859d-f0293e10e0bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.752322] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Migration 5ac7b468-c1e5-4bd3-a71b-88b26e1c6c2f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1391.752451] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance 04f42241-5598-47e2-906c-998a19da434f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.752621] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1391.752754] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1391.793943] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5674c5a1-f7ad-41ce-83a5-bb0476c86d91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.801662] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d81767-8a10-4c36-97cd-137a0d957967 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.831345] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca69874-f547-49c5-bd89-f8a57bd44761 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.838329] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5805616-e433-4afe-a2c0-883aa3472f15 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.850836] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 
{{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.354102] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1392.537440] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.537838] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.538160] env[68279]: DEBUG nova.compute.manager [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Going to confirm migration 8 {{(pid=68279) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1392.859418] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1392.859823] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.131s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.110180] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.110378] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1393.110560] env[68279]: DEBUG nova.network.neutron [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1393.110749] env[68279]: DEBUG nova.objects.instance [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'info_cache' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1394.306439] env[68279]: DEBUG nova.network.neutron [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.809649] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1394.809921] env[68279]: DEBUG nova.objects.instance [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'migration_context' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.313552] env[68279]: DEBUG nova.objects.base [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Object Instance<04f42241-5598-47e2-906c-998a19da434f> lazy-loaded attributes: info_cache,migration_context {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1395.314571] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6d9d4e4c-e963-4e46-a493-c1614d0f081f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.334172] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cc0120e-332c-4f1f-80fa-b7cb59bbf73a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.339772] env[68279]: DEBUG oslo_vmware.api [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1395.339772] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d87a7a-885e-54fe-8435-8c4c44d0b1e6" [ 1395.339772] env[68279]: _type = "Task" [ 1395.339772] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.347061] env[68279]: DEBUG oslo_vmware.api [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d87a7a-885e-54fe-8435-8c4c44d0b1e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.849971] env[68279]: DEBUG oslo_vmware.api [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d87a7a-885e-54fe-8435-8c4c44d0b1e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.850279] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.850513] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.860109] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.860304] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.408768] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a757c35-b7b5-493d-8afb-d3ba100171e1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.416252] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7e2d2d-ad91-414f-b0ad-87ec44eff77c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.447923] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dedf5ec-b084-4859-82f2-9b1e11c44bf3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.455720] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b663ba-3151-42ee-9421-30b53e2b676c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.468182] env[68279]: DEBUG nova.compute.provider_tree [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.971925] env[68279]: DEBUG nova.scheduler.client.report [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.983085] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.132s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.983480] env[68279]: DEBUG nova.compute.manager [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=68279) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1398.168700] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.539049] env[68279]: INFO nova.scheduler.client.report [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted allocation for migration 5ac7b468-c1e5-4bd3-a71b-88b26e1c6c2f [ 1399.044203] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e97915fa-732f-4a18-87f0-38809b6aefb2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.506s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.163223] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.355717] env[68279]: DEBUG nova.objects.instance [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.860584] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.860797] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.860990] 
env[68279]: DEBUG nova.network.neutron [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1399.861194] env[68279]: DEBUG nova.objects.instance [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'info_cache' on Instance uuid 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.364817] env[68279]: DEBUG nova.objects.base [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Object Instance<04f42241-5598-47e2-906c-998a19da434f> lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1401.066942] env[68279]: DEBUG nova.network.neutron [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [{"id": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "address": "fa:16:3e:94:72:92", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2750b33-4e", "ovs_interfaceid": "a2750b33-4e6b-438f-8a02-3c67fc83b7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.570797] env[68279]: DEBUG oslo_concurrency.lockutils [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-04f42241-5598-47e2-906c-998a19da434f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1402.576874] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1402.577222] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62342844-5662-4128-9671-169c5757632d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.587508] env[68279]: DEBUG oslo_vmware.api [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1402.587508] env[68279]: value = "task-2964123" [ 1402.587508] env[68279]: _type = "Task" [ 1402.587508] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.595422] env[68279]: DEBUG oslo_vmware.api [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964123, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.097085] env[68279]: DEBUG oslo_vmware.api [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964123, 'name': PowerOnVM_Task, 'duration_secs': 0.370047} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.097330] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.097559] env[68279]: DEBUG nova.compute.manager [None req-78d82b2d-771d-4e63-82d3-2c58c2e6199f tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.098318] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce959ed-e937-47cd-a5d8-d5d9d86628ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.856280] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "04f42241-5598-47e2-906c-998a19da434f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.856692] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.856740] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock 
"04f42241-5598-47e2-906c-998a19da434f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.856908] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.857085] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.859096] env[68279]: INFO nova.compute.manager [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Terminating instance [ 1404.363815] env[68279]: DEBUG nova.compute.manager [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1404.364067] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1404.366012] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c662d9-bebf-416d-986f-49e8645864b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.372435] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.372658] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7b0d3ad-6b69-4a95-b2ab-573c7cb95e36 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.378287] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1404.378287] env[68279]: value = "task-2964124" [ 1404.378287] env[68279]: _type = "Task" [ 1404.378287] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.386808] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.888573] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964124, 'name': PowerOffVM_Task, 'duration_secs': 0.174121} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.889012] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1404.889012] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1404.889260] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cca3235b-0da7-4394-89d4-47fa20d0819a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.950567] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1404.950824] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.951016] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleting the datastore file [datastore1] 04f42241-5598-47e2-906c-998a19da434f {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.951347] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f572b6a-f1ad-4020-bef2-830f96de0974 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.957877] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1404.957877] env[68279]: value = "task-2964126" [ 1404.957877] env[68279]: _type = 
"Task" [ 1404.957877] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.965724] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964126, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.466869] env[68279]: DEBUG oslo_vmware.api [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964126, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129307} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.467160] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.467316] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.467502] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.467761] env[68279]: INFO nova.compute.manager [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1405.468035] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.468234] env[68279]: DEBUG nova.compute.manager [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1405.468331] env[68279]: DEBUG nova.network.neutron [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1405.890561] env[68279]: DEBUG nova.compute.manager [req-ae294012-c445-4f3c-b33e-b61daed02fea req-9d4daca2-a03e-4e2c-8392-5796dcd4c58f service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Received event network-vif-deleted-a2750b33-4e6b-438f-8a02-3c67fc83b7d7 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1405.890793] env[68279]: INFO nova.compute.manager [req-ae294012-c445-4f3c-b33e-b61daed02fea req-9d4daca2-a03e-4e2c-8392-5796dcd4c58f service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Neutron deleted interface a2750b33-4e6b-438f-8a02-3c67fc83b7d7; detaching it from the instance and deleting it from the info cache [ 1405.890943] env[68279]: DEBUG nova.network.neutron [req-ae294012-c445-4f3c-b33e-b61daed02fea req-9d4daca2-a03e-4e2c-8392-5796dcd4c58f service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.375441] env[68279]: DEBUG nova.network.neutron [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.393752] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9136c09-794f-4ae6-bf61-c85d51bd55e2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.406770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1c9e0e-a017-4795-b4e1-b3ce94004ad8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.431597] env[68279]: DEBUG nova.compute.manager [req-ae294012-c445-4f3c-b33e-b61daed02fea req-9d4daca2-a03e-4e2c-8392-5796dcd4c58f service nova] [instance: 04f42241-5598-47e2-906c-998a19da434f] Detach interface failed, port_id=a2750b33-4e6b-438f-8a02-3c67fc83b7d7, reason: Instance 04f42241-5598-47e2-906c-998a19da434f could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1406.878510] env[68279]: INFO nova.compute.manager [-] [instance: 04f42241-5598-47e2-906c-998a19da434f] Took 1.41 seconds to deallocate network for instance. 
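The vCenter operations traced above (ReconfigVM_Task, PowerOnVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) all follow the same shape in the log: one "Waiting for the task" entry, periodic "Task: {...} progress is N%." entries roughly every 0.5 s, and finally a "completed successfully" entry carrying duration_secs. The snippet below is a minimal sketch of that polling pattern only, not oslo.vmware's actual implementation; TaskInfo and the fetch callable are hypothetical stand-ins for the vSphere TaskInfo lookup the driver performs.

import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    state: str      # "running" | "success" | "error"
    progress: int   # percent complete, as in "progress is 5%."
    error: str = ""

def wait_for_task(task_ref: str,
                  fetch: Callable[[str], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    # Poll until the task finishes: one "Waiting for the task" line,
    # periodic progress lines, then success (or an exception on error).
    print(f"Waiting for the task: {task_ref} to complete.")
    start = time.monotonic()
    while True:
        info = fetch(task_ref)
        if info.state == "success":
            print(f"Task: {task_ref} completed successfully "
                  f"(duration_secs={time.monotonic() - start:.6f}).")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task: {task_ref} progress is {info.progress}%.")
        time.sleep(interval)

# Usage: a fake task that reports 5% and 14% before finishing, echoing task-2964122 above.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 5), TaskInfo("running", 14),
                   TaskInfo("success", 100)])
    wait_for_task("task-2964122", lambda ref: next(states), interval=0.01)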
[ 1407.385313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.385713] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.385821] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.408753] env[68279]: INFO nova.scheduler.client.report [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted allocations for instance 04f42241-5598-47e2-906c-998a19da434f [ 1407.917726] env[68279]: DEBUG oslo_concurrency.lockutils [None req-4a4d3239-26d3-4958-9f1a-72dec3a53587 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "04f42241-5598-47e2-906c-998a19da434f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.061s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.731172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.731172] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.236767] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1409.755976] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1409.756313] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.757875] env[68279]: INFO nova.compute.claims [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1409.809064] env[68279]: DEBUG nova.compute.manager [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1410.326363] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.811385] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b520cf24-3123-4644-ba5c-84e8ec97bd4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.819051] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33419996-ccde-489c-96b2-0daee7d820d0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.849184] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d081c380-176f-46a9-b83e-1d35b3d4362f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.855769] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff38ef5f-d17c-4c21-9c87-be640b261daa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.868388] env[68279]: DEBUG nova.compute.provider_tree [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.371269] env[68279]: DEBUG nova.scheduler.client.report [None 
req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.876518] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.877210] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1411.880407] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.554s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.384088] env[68279]: DEBUG nova.compute.utils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1412.387644] env[68279]: INFO nova.compute.claims [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.391181] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1412.391350] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.433918] env[68279]: DEBUG nova.policy [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0f0f631a27f4d93bcc70956d721d9ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1ad13d5de94b14ab00b7f003c1851d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1412.684232] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Successfully created port: c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1412.892488] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1412.896426] env[68279]: INFO nova.compute.resource_tracker [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating resource usage from migration c30f6081-fc61-4bb4-9890-2dc8dc25563c [ 1412.947019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26189f74-20a8-4e54-b7a6-5b9ab3654a5e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.953717] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4545dd-3e68-410b-bab9-87be79d9a4ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.985100] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb3f7cd-acf2-4c25-b60e-43e1579b2e03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.992026] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cedfd4-88af-4b28-abbd-13583e13fd58 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.004478] env[68279]: DEBUG nova.compute.provider_tree [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.507283] env[68279]: DEBUG nova.scheduler.client.report [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1413.909870] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1413.936507] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1413.936781] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.936944] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1413.937145] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.937294] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1413.937444] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1413.937668] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1413.937839] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1413.938014] env[68279]: DEBUG 
nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1413.938187] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1413.938359] env[68279]: DEBUG nova.virt.hardware [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1413.939265] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f593c1-e468-429e-95d4-a7136861d6b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.947030] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae7bf36-7014-4a70-b543-411b5bc37bbf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.012127] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.132s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.012350] env[68279]: INFO nova.compute.manager [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Migrating [ 1414.027183] env[68279]: DEBUG nova.compute.manager [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Received event network-vif-plugged-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1414.027386] env[68279]: DEBUG oslo_concurrency.lockutils [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.027587] env[68279]: DEBUG oslo_concurrency.lockutils [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.027753] env[68279]: DEBUG oslo_concurrency.lockutils [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] Lock 
"b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.027917] env[68279]: DEBUG nova.compute.manager [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] No waiting events found dispatching network-vif-plugged-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.028093] env[68279]: WARNING nova.compute.manager [req-252f0ef7-0639-4388-9d8b-e6e36c9cffcd req-76900f8b-4dc3-47cd-9bfb-d5e25680bd50 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Received unexpected event network-vif-plugged-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba for instance with vm_state building and task_state spawning. [ 1414.105943] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Successfully updated port: c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1414.529590] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.529848] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.530052] env[68279]: DEBUG nova.network.neutron [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.611116] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.611207] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.611436] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Building network info cache for instance {{(pid=68279) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.139038] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1415.239345] env[68279]: DEBUG nova.network.neutron [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.271835] env[68279]: DEBUG nova.network.neutron [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.741736] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.774193] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.774481] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Instance network_info: |[{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1415.774876] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:f5:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.782369] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1415.782565] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.782783] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8eb9482d-9e88-44f7-ac2a-6be2cef8ef3d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.803375] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.803375] env[68279]: value = "task-2964127" [ 1415.803375] env[68279]: _type = "Task" [ 1415.803375] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.812281] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964127, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.050516] env[68279]: DEBUG nova.compute.manager [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Received event network-changed-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1416.050759] env[68279]: DEBUG nova.compute.manager [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Refreshing instance network info cache due to event network-changed-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1416.050913] env[68279]: DEBUG oslo_concurrency.lockutils [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.051072] env[68279]: DEBUG oslo_concurrency.lockutils [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.051240] env[68279]: DEBUG nova.network.neutron [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Refreshing network info cache for port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1416.313190] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964127, 'name': CreateVM_Task, 'duration_secs': 0.434195} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.313533] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.313947] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.314126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.314452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1416.314706] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-324cb373-cfb1-4531-8f62-849da0cad611 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.318910] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1416.318910] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212330f-d82f-1ee9-2ea1-9efcee249aec" [ 1416.318910] env[68279]: _type = "Task" [ 1416.318910] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.325788] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212330f-d82f-1ee9-2ea1-9efcee249aec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.755146] env[68279]: DEBUG nova.network.neutron [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updated VIF entry in instance network info cache for port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.755585] env[68279]: DEBUG nova.network.neutron [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.833413] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5212330f-d82f-1ee9-2ea1-9efcee249aec, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.833820] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1416.834139] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1416.834403] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.834619] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.834883] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1416.835222] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4ebde1e-2831-4e99-84d0-0aa2ac76071f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.844151] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1416.844354] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1416.845177] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c43a9f3-0beb-4915-9b71-8a94700a3fa6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.850584] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1416.850584] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ffe72-75fb-8c8f-9345-48258920609d" [ 1416.850584] env[68279]: _type = "Task" [ 1416.850584] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.858730] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ffe72-75fb-8c8f-9345-48258920609d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.255714] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397507e6-db05-4797-83a6-a00783beb390 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.260148] env[68279]: DEBUG oslo_concurrency.lockutils [req-8ea41514-184a-4786-8eb0-6d972f5f50a8 req-f5ead994-890d-4819-be52-71bd6accc204 service nova] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.275513] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1417.361046] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]521ffe72-75fb-8c8f-9345-48258920609d, 'name': SearchDatastore_Task, 'duration_secs': 0.008462} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.361808] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62559c4-e75a-4ece-ae34-5091d042e35f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.366591] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1417.366591] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523ec31f-4900-ca3d-3e30-5483252352e2" [ 1417.366591] env[68279]: _type = "Task" [ 1417.366591] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.373801] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523ec31f-4900-ca3d-3e30-5483252352e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.781481] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1417.781743] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6935a06-2814-4011-a6d2-993ea1e7fd39 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.791819] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1417.791819] env[68279]: value = "task-2964128" [ 1417.791819] env[68279]: _type = "Task" [ 1417.791819] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.799647] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964128, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.876577] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523ec31f-4900-ca3d-3e30-5483252352e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009532} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.876741] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.877016] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1417.877275] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39191a19-e8a4-41d0-84ec-1e75c1e6fddf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.883172] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1417.883172] env[68279]: value = "task-2964129" [ 1417.883172] env[68279]: _type = "Task" [ 1417.883172] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.890968] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.301692] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964128, 'name': PowerOffVM_Task, 'duration_secs': 0.182355} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.301966] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1418.302164] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1418.392551] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427331} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.393031] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.393220] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.393334] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-372c5792-c303-454e-96c6-e744e0ad47ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.399643] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1418.399643] env[68279]: value = "task-2964130" [ 1418.399643] env[68279]: _type = "Task" [ 1418.399643] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.406964] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964130, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.808800] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1418.809108] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1418.809280] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1418.809464] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1418.809610] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1418.809758] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1418.809975] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1418.810154] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1418.810322] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies 
{{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1418.810481] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1418.810654] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1418.815576] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b08f192-41b0-452d-82dc-5e502748db1b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.831917] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1418.831917] env[68279]: value = "task-2964131" [ 1418.831917] env[68279]: _type = "Task" [ 1418.831917] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.840040] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964131, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.909284] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060925} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.909552] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1418.910372] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5437f12a-0d5e-4fe0-9d07-784f70c10bd1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.933245] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.933525] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af0b5f5e-6fcd-4e1d-a5dc-8c95582a8269 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.951836] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1418.951836] env[68279]: value = "task-2964132" [ 1418.951836] env[68279]: _type = "Task" [ 1418.951836] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.959508] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964132, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.341846] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964131, 'name': ReconfigVM_Task, 'duration_secs': 0.397935} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.342162] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1419.462308] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964132, 'name': ReconfigVM_Task, 'duration_secs': 0.300018} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.462711] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1419.463230] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cea9d665-b02d-4c89-aa2e-eaf15fffb2ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.469880] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1419.469880] env[68279]: value = "task-2964133" [ 1419.469880] env[68279]: _type = "Task" [ 1419.469880] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.477074] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964133, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.848803] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1419.849068] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.849239] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1419.849428] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.849577] 
env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1419.849727] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1419.849934] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1419.850109] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1419.850280] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1419.850445] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1419.850618] env[68279]: DEBUG nova.virt.hardware [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1419.855895] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1419.856202] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28abfba5-7c19-4030-bf49-057649afd3dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.874593] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1419.874593] env[68279]: value = "task-2964134" [ 1419.874593] env[68279]: _type = "Task" [ 1419.874593] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.882238] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964134, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.980355] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964133, 'name': Rename_Task, 'duration_secs': 0.135286} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.980646] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.980854] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd028743-ca32-4a19-9bdc-f3b31accf18a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.988247] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1419.988247] env[68279]: value = "task-2964135" [ 1419.988247] env[68279]: _type = "Task" [ 1419.988247] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.995658] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964135, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.384392] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964134, 'name': ReconfigVM_Task, 'duration_secs': 0.158169} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.384773] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1420.385629] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6655338c-b3c1-458c-8da1-6478808820d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.408102] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1420.408723] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2751930-8121-46d9-9711-024668bbd737 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.426735] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1420.426735] env[68279]: value = "task-2964136" [ 1420.426735] env[68279]: _type = "Task" [ 1420.426735] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.435765] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.499563] env[68279]: DEBUG oslo_vmware.api [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964135, 'name': PowerOnVM_Task, 'duration_secs': 0.43495} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.500072] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.500158] env[68279]: INFO nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Took 6.59 seconds to spawn the instance on the hypervisor. 
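The vCenter operations recorded above (ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same shape: the driver submits the task, logs "Waiting for the task ... to complete", polls its progress percentage, and finally logs "... completed successfully". The loop below is only a minimal sketch of that polling pattern; the get_task_info callable, the state names and the poll interval are assumptions made for this illustration, not the actual oslo.vmware wait_for_task/_poll_task code referenced in these entries.

    import time

    class TaskFailed(Exception):
        """Raised when the hypervisor reports the task as unsuccessful."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() is an assumed callable returning a dict shaped like
        # the task info logged above, e.g.
        # {'id': 'task-2964135', 'name': 'PowerOnVM_Task',
        #  'state': 'running', 'progress': 5}
        while True:
            info = get_task_info()
            if info.get('state') == 'success':
                # Corresponds to the "... completed successfully." entries.
                return info
            if info.get('state') == 'error':
                raise TaskFailed(info.get('id'))
            # Corresponds to the "progress is N%." entries emitted on each poll.
            print("Task %s progress is %s%%." % (info['id'], info.get('progress', 0)))
            time.sleep(poll_interval)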
[ 1420.500342] env[68279]: DEBUG nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1420.501107] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ab1c27-5326-4d39-81ae-8793e5c409c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.936770] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964136, 'name': ReconfigVM_Task, 'duration_secs': 0.259955} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.936996] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfigured VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.937268] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1421.016672] env[68279]: INFO nova.compute.manager [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Took 11.27 seconds to build instance. 
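The nova.virt.hardware blocks repeated throughout this section all resolve identically: neither the m1.nano/m1.micro flavor nor the image sets a CPU topology, so the limits fall back to 65536 sockets/cores/threads, the preference stays 0:0:0, and a 1-vCPU instance admits exactly one topology, 1 socket x 1 core x 1 thread. The snippet below is just the arithmetic those entries record, using an assumed brute-force enumeration; it is not Nova's actual _get_desirable_cpu_topologies implementation.

    from itertools import product

    MAX_UNIT = 65536  # limit logged when flavor and image set no constraint

    def possible_topologies(vcpus, max_sockets=MAX_UNIT, max_cores=MAX_UNIT,
                            max_threads=MAX_UNIT):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield s, c, t

    # For the 1-vCPU flavors in this log this yields only (1, 1, 1), matching
    # "Got 1 possible topologies" and
    # "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(list(possible_topologies(1)))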
[ 1421.444208] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8d495f-8d6f-4c0e-96b3-3668113b0f05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.463260] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6121a6-547f-4ec5-85c2-b70be11a33ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.479801] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1421.518603] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b6ec5e95-d458-4db8-9c22-2b3afceefa2c tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.788s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.645342] env[68279]: DEBUG nova.compute.manager [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Received event network-changed-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1421.645567] env[68279]: DEBUG nova.compute.manager [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Refreshing instance network info cache due to event network-changed-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1421.645726] env[68279]: DEBUG oslo_concurrency.lockutils [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.645872] env[68279]: DEBUG oslo_concurrency.lockutils [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1421.646042] env[68279]: DEBUG nova.network.neutron [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Refreshing network info cache for port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.016455] env[68279]: DEBUG nova.network.neutron [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Port 64d7b0c3-6342-46df-b3f7-dd7387e97727 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1422.353916] env[68279]: DEBUG nova.network.neutron [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updated VIF entry in instance network info cache for port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.354295] env[68279]: DEBUG nova.network.neutron [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.856937] env[68279]: DEBUG oslo_concurrency.lockutils [req-092a22b5-f556-4ebb-a6bc-e5504bdb7d3d req-006ee1b6-f8d1-43bd-b3b0-0158bf80cccb service nova] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1423.038000] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1423.038305] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1423.038555] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.094490] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 
tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.094759] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.094890] env[68279]: DEBUG nova.network.neutron [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.779957] env[68279]: DEBUG nova.network.neutron [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.282688] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.806458] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2109a7ce-82d5-42be-9fc0-96b642baa6bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.825134] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4e9f1f-364b-4b08-99f8-54269fbc7964 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.831624] 
env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1426.337609] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1426.337958] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72bf8cd5-0a9e-418b-b938-2da1500688e0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.346338] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1426.346338] env[68279]: value = "task-2964137" [ 1426.346338] env[68279]: _type = "Task" [ 1426.346338] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.355610] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.856695] env[68279]: DEBUG oslo_vmware.api [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964137, 'name': PowerOnVM_Task, 'duration_secs': 0.368496} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.857042] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.857188] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b60b9-6ffc-4c02-a354-0a51e1d6638e tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance 'e6ea9225-fdae-4ba2-859d-f0293e10e0bf' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1429.165243] env[68279]: DEBUG nova.network.neutron [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Port 64d7b0c3-6342-46df-b3f7-dd7387e97727 binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1429.165563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.165665] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1429.165826] env[68279]: DEBUG nova.network.neutron [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1429.896657] env[68279]: DEBUG nova.network.neutron [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.399786] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1430.903821] env[68279]: DEBUG nova.compute.manager [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1430.904070] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1430.904326] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.407555] env[68279]: DEBUG nova.objects.instance [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'migration_context' on Instance uuid e6ea9225-fdae-4ba2-859d-f0293e10e0bf {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1431.963794] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fcdbd3-2de3-45a4-a92d-7a96852c209a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.971138] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924c4210-7858-4e1a-b11b-e997c2cd22e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.999948] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87631d8a-9b82-468b-ae42-56671791b931 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.006687] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcf6985-e43d-407c-9eee-5ab7040bd36c {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.019346] env[68279]: DEBUG nova.compute.provider_tree [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.522437] env[68279]: DEBUG nova.scheduler.client.report [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1433.533450] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.629s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.067461] env[68279]: INFO nova.compute.manager [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Swapping old allocation on dict_keys(['40ba16cf-8244-4715-b8c1-975029462ee4']) held by migration c30f6081-fc61-4bb4-9890-2dc8dc25563c for instance [ 1435.087968] env[68279]: DEBUG nova.scheduler.client.report [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Overwriting current allocation {'allocations': {'40ba16cf-8244-4715-b8c1-975029462ee4': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 177}}, 'project_id': '7d7a90a39b864e3e985b3b828c3fd363', 'user_id': '94e9d9c1927948f5bb8f42235b09f008', 'consumer_generation': 1} on consumer e6ea9225-fdae-4ba2-859d-f0293e10e0bf {{(pid=68279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1435.178768] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.178998] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1435.179191] env[68279]: DEBUG nova.network.neutron [None req-a2b64440-3f56-48d3-88a0-488e88953664 
tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1435.878989] env[68279]: DEBUG nova.network.neutron [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [{"id": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "address": "fa:16:3e:35:98:3f", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d7b0c3-63", "ovs_interfaceid": "64d7b0c3-6342-46df-b3f7-dd7387e97727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.381425] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-e6ea9225-fdae-4ba2-859d-f0293e10e0bf" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1436.381891] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1436.382244] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67fa578f-d3b2-4b7f-9c77-ff24bfdb71c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.390062] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1436.390062] env[68279]: value = "task-2964138" [ 1436.390062] env[68279]: _type = "Task" [ 1436.390062] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.398258] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964138, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.899331] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964138, 'name': PowerOffVM_Task, 'duration_secs': 0.195393} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.899661] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.900184] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1436.900408] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.900566] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1436.900741] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.900886] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1436.901041] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1436.901249] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1436.901408] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1436.901567] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1436.901724] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1436.901898] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1436.906686] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94cf5e45-e2cc-4cbe-98ef-81b7195d4946 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.921687] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1436.921687] env[68279]: value = "task-2964139" [ 1436.921687] env[68279]: _type = "Task" [ 1436.921687] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.928679] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.431901] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964139, 'name': ReconfigVM_Task, 'duration_secs': 0.1247} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.432752] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f68d0b-f4cf-4858-a0a8-fab2c7f8a10a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.450909] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1437.451172] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.451334] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1437.451516] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.451663] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1437.451810] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1437.452020] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1437.452186] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1437.452356] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1437.452516] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1437.452707] env[68279]: DEBUG nova.virt.hardware [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1437.453626] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6838a06-8aec-4877-a385-b537a609ba8f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.458602] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1437.458602] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52904371-bec5-63db-0df0-e7d1ad6d6700" [ 1437.458602] env[68279]: _type = "Task" [ 1437.458602] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.466288] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52904371-bec5-63db-0df0-e7d1ad6d6700, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.968506] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52904371-bec5-63db-0df0-e7d1ad6d6700, 'name': SearchDatastore_Task, 'duration_secs': 0.007894} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.973849] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1437.974166] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e003917f-3735-4951-b46f-6de864196068 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.992375] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1437.992375] env[68279]: value = "task-2964140" [ 1437.992375] env[68279]: _type = "Task" [ 1437.992375] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.001014] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.502121] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964140, 'name': ReconfigVM_Task, 'duration_secs': 0.172199} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.502403] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1438.503183] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23091ea5-07b4-4e14-b006-0107896cb959 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.524413] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.524670] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51b2148e-1586-46f4-8f9a-b74c68ddf482 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.542668] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1438.542668] env[68279]: value = "task-2964141" [ 1438.542668] env[68279]: _type = "Task" [ 1438.542668] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.550152] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964141, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.052800] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964141, 'name': ReconfigVM_Task, 'duration_secs': 0.252106} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.053180] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Reconfigured VM instance instance-0000007b to attach disk [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf/e6ea9225-fdae-4ba2-859d-f0293e10e0bf.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1439.053988] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21acdfa5-8a5e-4a30-ab50-5db5c0ba9e07 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.071524] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d653d929-1f60-4f3b-afa5-8891407ca5fa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.088544] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cf21ba-9b29-453b-96bf-1bf7d0488bf4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.105445] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f241c8-a983-4eaa-ac0b-44ee8f867c42 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.111588] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.111810] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bee2db4d-0626-4b82-9ea1-5e4b2e7aa8f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.118138] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1439.118138] env[68279]: value = "task-2964142" [ 1439.118138] env[68279]: _type = "Task" [ 1439.118138] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.133605] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.628302] env[68279]: DEBUG oslo_vmware.api [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964142, 'name': PowerOnVM_Task, 'duration_secs': 0.334277} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.628572] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.640054] env[68279]: INFO nova.compute.manager [None req-a2b64440-3f56-48d3-88a0-488e88953664 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance to original state: 'active' [ 1441.497012] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.497319] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1441.497532] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.498187] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1441.498384] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1441.500526] env[68279]: INFO nova.compute.manager [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Terminating instance [ 1442.004201] env[68279]: DEBUG nova.compute.manager [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: 
e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1442.004704] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1442.005345] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef02c2ff-f639-49de-899b-72cf86879659 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.013901] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1442.014157] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf0499e1-8da3-4055-97fa-82a8594b1411 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.021580] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1442.021580] env[68279]: value = "task-2964143" [ 1442.021580] env[68279]: _type = "Task" [ 1442.021580] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.029904] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.531648] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964143, 'name': PowerOffVM_Task, 'duration_secs': 0.162246} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.531648] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1442.531886] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1442.531986] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b340999-76b9-4154-95a5-b4a431a28830 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.595817] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1442.596043] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1442.596237] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleting the datastore file [datastore1] e6ea9225-fdae-4ba2-859d-f0293e10e0bf {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1442.596512] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e38c2de8-5d5c-4112-a086-7e38432adf37 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.603653] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1442.603653] env[68279]: value = "task-2964145" [ 1442.603653] env[68279]: _type = "Task" [ 1442.603653] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.610781] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964145, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.113456] env[68279]: DEBUG oslo_vmware.api [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134858} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.113850] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.113896] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.114057] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.114238] env[68279]: INFO nova.compute.manager [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1443.114471] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1443.114655] env[68279]: DEBUG nova.compute.manager [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1443.114750] env[68279]: DEBUG nova.network.neutron [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1443.545101] env[68279]: DEBUG nova.compute.manager [req-8c3b6b27-56a1-4af0-9bea-ceacff707b1f req-166523a4-5132-4a92-9db1-0fe1f2ca59b8 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Received event network-vif-deleted-64d7b0c3-6342-46df-b3f7-dd7387e97727 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1443.545299] env[68279]: INFO nova.compute.manager [req-8c3b6b27-56a1-4af0-9bea-ceacff707b1f req-166523a4-5132-4a92-9db1-0fe1f2ca59b8 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Neutron deleted interface 64d7b0c3-6342-46df-b3f7-dd7387e97727; detaching it from the instance and deleting it from the info cache [ 1443.545410] env[68279]: DEBUG nova.network.neutron [req-8c3b6b27-56a1-4af0-9bea-ceacff707b1f req-166523a4-5132-4a92-9db1-0fe1f2ca59b8 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.030208] env[68279]: DEBUG nova.network.neutron [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.048024] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57a34d22-0436-4b27-95f0-080438a6b997 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.058883] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c07cc2-74e1-4900-9f8a-0f65a543f899 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.083458] env[68279]: DEBUG nova.compute.manager [req-8c3b6b27-56a1-4af0-9bea-ceacff707b1f req-166523a4-5132-4a92-9db1-0fe1f2ca59b8 service nova] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Detach interface failed, port_id=64d7b0c3-6342-46df-b3f7-dd7387e97727, reason: Instance e6ea9225-fdae-4ba2-859d-f0293e10e0bf could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1444.533332] env[68279]: INFO nova.compute.manager [-] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Took 1.42 seconds to deallocate network for instance. 
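The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern that recurs throughout this log: invoke a vSphere "*_Task" method through the API session, then poll the returned task object until vCenter reports completion (the "progress is N%" lines). A minimal illustrative sketch of that pattern follows; the vCenter host name and credentials are placeholders (only the instance UUID is taken from the log), and it assumes a reachable vCenter rather than reproducing nova's own helpers.

# Sketch only: the invoke-then-poll pattern visible in the surrounding log lines.
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; api_retry_count and task_poll_interval
# are the same kind of knobs nova-compute configures for its own session.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Look the VM up by its instance UUID; the log does the equivalent via
# SearchIndex.FindAllByUuid shortly before deallocating the network.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='e6ea9225-fdae-4ba2-859d-f0293e10e0bf',
    vmSearch=True, instanceUuid=True)

if vm_refs:
    # Start the asynchronous power-off and block until it finishes;
    # wait_for_task() polls the task's progress and raises if vCenter
    # reports an error state instead of success.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
    session.wait_for_task(task)

session.logout()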
[ 1445.040773] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.042086] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.042086] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.064048] env[68279]: INFO nova.scheduler.client.report [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted allocations for instance e6ea9225-fdae-4ba2-859d-f0293e10e0bf [ 1445.571661] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2442c775-8060-4a0a-9bd8-530d2a7407b1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "e6ea9225-fdae-4ba2-859d-f0293e10e0bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.074s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.367846] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "c6071f0d-76f5-4415-b495-b0dbab00daca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1446.368107] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.870975] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1447.398442] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.398788] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.400942] env[68279]: INFO nova.compute.claims [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.167783] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.168195] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.447975] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5429d63b-93c5-4d6a-8e17-d14e7b9e1c02 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.455604] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba1f1d0-a2b7-4e08-869d-771a80b4ab7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.484558] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316f4268-b5d2-4e6c-9a23-a5ecbe990ff5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.491307] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8f67f3-48c3-42a2-82dc-ecffd37dbb98 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.503931] env[68279]: DEBUG nova.compute.provider_tree [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.006939] env[68279]: DEBUG nova.scheduler.client.report [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed 
for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1449.512284] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.512815] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Start building networks asynchronously for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1450.017518] env[68279]: DEBUG nova.compute.utils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1450.018933] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1450.019119] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1450.063151] env[68279]: DEBUG nova.policy [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e9d9c1927948f5bb8f42235b09f008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d7a90a39b864e3e985b3b828c3fd363', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1450.320658] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Successfully created port: a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1450.524718] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Start building block device mappings for instance. {{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1451.531700] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1451.558210] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1451.558456] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.558634] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1451.558785] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.558932] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1451.559094] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1451.559323] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1451.559502] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1451.559672] env[68279]: DEBUG nova.virt.hardware [None 
req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1451.560070] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1451.560070] env[68279]: DEBUG nova.virt.hardware [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1451.560900] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556d92dc-a2ae-4a74-8e3b-850848192ffa {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.568984] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3122c5-c8a6-4f0f-a0b2-55be64e5ddb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.667371] env[68279]: DEBUG nova.compute.manager [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Received event network-vif-plugged-a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1451.667639] env[68279]: DEBUG oslo_concurrency.lockutils [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] Acquiring lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1451.667882] env[68279]: DEBUG oslo_concurrency.lockutils [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1451.668163] env[68279]: DEBUG oslo_concurrency.lockutils [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.668435] env[68279]: DEBUG nova.compute.manager [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] No waiting events found dispatching network-vif-plugged-a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1451.668506] env[68279]: WARNING nova.compute.manager [req-3da6aa83-0127-429f-a2aa-2f79bf6baad2 
req-5444aba4-c4f0-4b77-9ccb-ebe6ea4f7ce6 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Received unexpected event network-vif-plugged-a32389b4-23aa-4395-99b6-f055ab5890c5 for instance with vm_state building and task_state spawning. [ 1451.670283] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1451.670501] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1451.670649] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1451.670811] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1451.746726] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Successfully updated port: a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1452.174295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.174295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.174295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.174662] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1452.175356] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730aa810-155e-4b67-943a-9189e04533dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.183788] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-424cf70b-f26d-4982-ba7c-292aeec8298d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.198108] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5ac919-00ff-48b2-8fb1-fd0591d1f18f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.204882] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78c89ba-6f2b-45e5-9b71-802d5aed62e7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.235775] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180618MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1452.235929] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.236126] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.249702] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.249957] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1452.250022] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1452.781568] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1452.909305] env[68279]: DEBUG nova.network.neutron [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [{"id": "a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.299944] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance b8d6f6fc-8bf6-46b2-8b35-94271c4e051d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1453.300152] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance c6071f0d-76f5-4415-b495-b0dbab00daca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1453.300348] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1453.300492] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1453.350578] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4f437d-c52e-4919-b7b7-f41020edd783 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.357497] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092a5e28-460c-477d-8b2d-f4707be4d4ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.387373] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cda452-9316-4ffe-85b7-5cbbb38829ef {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.394806] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec527ba-4751-4551-a9b5-84c9c8647da3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.409506] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.412781] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1453.414059] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Instance network_info: |[{"id": "a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1453.414059] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:00:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a32389b4-23aa-4395-99b6-f055ab5890c5', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.420960] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1453.421883] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1453.422127] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cd1fa69-b2c7-4b14-9668-819197407887 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.441527] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.441527] env[68279]: value = "task-2964146" [ 1453.441527] env[68279]: _type = "Task" [ 1453.441527] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.449611] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964146, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.695909] env[68279]: DEBUG nova.compute.manager [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Received event network-changed-a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1453.696059] env[68279]: DEBUG nova.compute.manager [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Refreshing instance network info cache due to event network-changed-a32389b4-23aa-4395-99b6-f055ab5890c5. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1453.696297] env[68279]: DEBUG oslo_concurrency.lockutils [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] Acquiring lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.696474] env[68279]: DEBUG oslo_concurrency.lockutils [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] Acquired lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.696639] env[68279]: DEBUG nova.network.neutron [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Refreshing network info cache for port a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1453.912685] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1453.952024] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964146, 'name': CreateVM_Task, 'duration_secs': 0.301188} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.952242] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1453.952827] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.952990] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.953324] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1453.953578] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9085d396-0e6d-46ea-96cf-54ae9a3e205e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.957994] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1453.957994] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa918b-369b-8c59-6473-0f3cede42a94" [ 1453.957994] env[68279]: _type = "Task" [ 1453.957994] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.965544] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa918b-369b-8c59-6473-0f3cede42a94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.398840] env[68279]: DEBUG nova.network.neutron [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updated VIF entry in instance network info cache for port a32389b4-23aa-4395-99b6-f055ab5890c5. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1454.399202] env[68279]: DEBUG nova.network.neutron [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [{"id": "a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.417086] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1454.417293] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.181s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1454.417502] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.417648] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68279) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1454.468509] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52aa918b-369b-8c59-6473-0f3cede42a94, 'name': SearchDatastore_Task, 'duration_secs': 0.010834} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.468792] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.469034] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.469276] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.469422] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1454.469599] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1454.469843] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ca89cb5-5a19-48bd-ae11-cfb145a9cd83 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.477762] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1454.477929] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1454.478612] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7062ed6-85d9-474e-bebd-e855e902ebb3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.483124] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1454.483124] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529402df-41b2-8498-3d18-7132302427cc" [ 1454.483124] env[68279]: _type = "Task" [ 1454.483124] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.489844] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529402df-41b2-8498-3d18-7132302427cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.901635] env[68279]: DEBUG oslo_concurrency.lockutils [req-2de97346-bfb7-439f-ad59-e5da85d7bb6e req-c120e9bf-a4da-4a33-911d-8bbb0bbecb08 service nova] Releasing lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.993590] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529402df-41b2-8498-3d18-7132302427cc, 'name': SearchDatastore_Task, 'duration_secs': 0.008288} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.994400] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df75c80d-1b50-4e3c-b2f6-746dda7334dd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.999053] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1454.999053] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ff913a-cff0-a651-7ab3-ef62cfd55948" [ 1454.999053] env[68279]: _type = "Task" [ 1454.999053] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.006960] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ff913a-cff0-a651-7ab3-ef62cfd55948, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.413026] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.413317] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.510930] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52ff913a-cff0-a651-7ab3-ef62cfd55948, 'name': SearchDatastore_Task, 'duration_secs': 0.010356} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.511181] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1455.511447] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c6071f0d-76f5-4415-b495-b0dbab00daca/c6071f0d-76f5-4415-b495-b0dbab00daca.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1455.511685] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-476989fd-37b0-49ac-9a39-86c05b054d4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.517795] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1455.517795] env[68279]: value = "task-2964147" [ 1455.517795] env[68279]: _type = "Task" [ 1455.517795] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.524960] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.027736] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964147, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.425609} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.028116] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] c6071f0d-76f5-4415-b495-b0dbab00daca/c6071f0d-76f5-4415-b495-b0dbab00daca.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1456.028252] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1456.028474] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7dd76f61-5435-4185-8b1b-9be2f60c8a1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.033793] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1456.033793] env[68279]: value = "task-2964148" [ 1456.033793] env[68279]: _type = "Task" [ 1456.033793] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.040571] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.168364] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1456.543289] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063905} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.543544] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1456.544306] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e154e5-f242-43af-9edf-81ae30699a57 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.566033] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] c6071f0d-76f5-4415-b495-b0dbab00daca/c6071f0d-76f5-4415-b495-b0dbab00daca.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1456.566278] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf7f67e7-5498-4100-9fe4-32c5fb2875ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.584833] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1456.584833] env[68279]: value = "task-2964149" [ 1456.584833] env[68279]: _type = "Task" [ 1456.584833] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.592211] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.094611] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964149, 'name': ReconfigVM_Task, 'duration_secs': 0.260724} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.095066] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Reconfigured VM instance instance-0000007d to attach disk [datastore1] c6071f0d-76f5-4415-b495-b0dbab00daca/c6071f0d-76f5-4415-b495-b0dbab00daca.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1457.095566] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd0d2e46-957a-409c-b2ac-567015a7bb91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.101067] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1457.101067] env[68279]: value = "task-2964150" [ 1457.101067] env[68279]: _type = "Task" [ 1457.101067] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.107923] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964150, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.610565] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964150, 'name': Rename_Task, 'duration_secs': 0.132576} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.610840] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1457.611235] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b46ef791-4b3f-40f9-8db5-826836fb19a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.617797] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1457.617797] env[68279]: value = "task-2964151" [ 1457.617797] env[68279]: _type = "Task" [ 1457.617797] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.625018] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.127870] env[68279]: DEBUG oslo_vmware.api [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964151, 'name': PowerOnVM_Task, 'duration_secs': 0.454289} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.128297] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1458.128388] env[68279]: INFO nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1458.128514] env[68279]: DEBUG nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1458.129309] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c1300a-9349-44d0-80fe-717089eec512 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.168041] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.649217] env[68279]: INFO nova.compute.manager [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Took 11.27 seconds to build instance. [ 1459.150908] env[68279]: DEBUG oslo_concurrency.lockutils [None req-e6dac552-0922-44f5-adef-2f22bdc644d3 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.783s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1459.568578] env[68279]: DEBUG nova.compute.manager [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Received event network-changed-a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1459.568879] env[68279]: DEBUG nova.compute.manager [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Refreshing instance network info cache due to event network-changed-a32389b4-23aa-4395-99b6-f055ab5890c5. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1459.569134] env[68279]: DEBUG oslo_concurrency.lockutils [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] Acquiring lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.569293] env[68279]: DEBUG oslo_concurrency.lockutils [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] Acquired lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1459.569472] env[68279]: DEBUG nova.network.neutron [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Refreshing network info cache for port a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.866174] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1459.866410] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.168856] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1460.169150] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Cleaning up deleted instances {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1460.264261] env[68279]: DEBUG nova.network.neutron [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updated VIF entry in instance network info cache for port a32389b4-23aa-4395-99b6-f055ab5890c5. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.264634] env[68279]: DEBUG nova.network.neutron [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [{"id": "a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.369559] env[68279]: DEBUG nova.compute.utils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1460.676573] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] There are 19 instances to clean {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1460.676760] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e6ea9225-fdae-4ba2-859d-f0293e10e0bf] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1460.767855] env[68279]: DEBUG oslo_concurrency.lockutils [req-ae62c676-06c3-487e-947e-b170fae8a19c req-8ade3707-54f3-4dca-a11b-7e625add2188 service nova] Releasing lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1460.871873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1461.179661] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 04f42241-5598-47e2-906c-998a19da434f] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.682869] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: ec90ec2e-f4a2-4b71-8588-d45a086d9453] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.928598] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.928864] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.929119] env[68279]: INFO nova.compute.manager [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Attaching volume 9da9b7a5-cad2-4a0e-bb32-c98453ae0155 to /dev/sdb [ 1461.959545] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380a2599-0f7e-4a4c-adab-8262dc6b89d2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.967296] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f0e263-e564-4354-8666-e35ba7e9f12c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.981856] env[68279]: DEBUG nova.virt.block_device [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating existing volume attachment record: 67d1a767-6e30-4714-a8e3-b5ce2b99dd7b {{(pid=68279) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1462.185611] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: a4d4e9c0-0165-4c11-ba98-1214e70b91a3] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1462.689681] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: e246ae0f-1679-4757-acf2-ef5239f3c36d] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1463.193791] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 78b58db9-0616-428d-999c-2f6548008466] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1463.697586] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 79905489-388d-4540-bdff-1c6a02f8bebd] Instance has had 0 of 5 cleanup 
attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1464.200934] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 5827dda0-48a4-4779-b6d2-7fbf73837583] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1464.704985] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 19f84ce0-5ab0-4749-a94a-3dbae0da8757] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1465.208432] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: c07d8d3c-2af3-47b7-87cb-980c7dd0204d] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1465.711649] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 5a7e2125-3310-4fcb-a281-59b0a2c07f67] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1466.215433] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: d318b1cb-91a1-49cd-a2b7-9ce9785dd1fd] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1466.719217] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 7c9c6661-2e52-4dba-8671-26f69d089903] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1467.024342] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Volume attach. 
Driver type: vmdk {{(pid=68279) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1467.024584] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594782', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'name': 'volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d', 'attached_at': '', 'detached_at': '', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'serial': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1467.025503] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9ae44d-8ad0-4b8f-a679-bf998f20d86d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.042094] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920bd2ad-7b31-4dca-b727-856e4ce91cc2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.067299] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155/volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1467.067549] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ddeb385-2af6-49bb-b607-3a9ed2a17627 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.085676] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1467.085676] env[68279]: value = "task-2964156" [ 1467.085676] env[68279]: _type = "Task" [ 1467.085676] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.093293] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964156, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.223208] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 2d05e318-abef-43b0-9ad3-8c839c372780] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1467.596224] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964156, 'name': ReconfigVM_Task, 'duration_secs': 0.370597} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.596490] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155/volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.601198] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-864c3c62-a99f-4d63-bfaf-ec3bf1fc09c7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.616760] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1467.616760] env[68279]: value = "task-2964157" [ 1467.616760] env[68279]: _type = "Task" [ 1467.616760] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.623689] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964157, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.726474] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 11bbfd41-52bb-410c-b368-1473a309d6a7] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1468.126428] env[68279]: DEBUG oslo_vmware.api [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964157, 'name': ReconfigVM_Task, 'duration_secs': 0.128192} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.126709] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594782', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'name': 'volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d', 'attached_at': '', 'detached_at': '', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'serial': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155'} {{(pid=68279) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1468.230137] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 84b2828a-e62c-45b2-a5ee-067ca66e626b] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1468.732868] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: dfb2208a-4d2b-44c8-bc2d-9bfdd02984ba] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1469.161797] env[68279]: DEBUG nova.objects.instance [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'flavor' on Instance uuid b8d6f6fc-8bf6-46b2-8b35-94271c4e051d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.235682] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 7e34039c-c51a-4f9c-961c-144f6d8a5130] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1469.666725] env[68279]: DEBUG oslo_concurrency.lockutils [None req-7fd3400e-0b57-4761-9aa7-86d046402c3a tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.738s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1469.738442] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] [instance: 298d3bc2-1fad-481f-993b-8d0dc9ed1ed1] Instance has had 0 of 5 cleanup attempts {{(pid=68279) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1471.040920] env[68279]: DEBUG nova.compute.manager [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Stashing vm_state: active {{(pid=68279) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1471.558105] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1471.558380] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1472.063094] env[68279]: INFO nova.compute.claims [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1472.569883] env[68279]: INFO nova.compute.resource_tracker [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating resource usage from migration 168d9041-44b2-4b53-bc9f-009a5d20ab50 [ 1472.618392] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9998093d-68dd-4b3f-a6ce-12d7ec897352 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.626144] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a826bc-ed15-4661-8333-da3907565b40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.655228] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52175f24-3829-44e7-bfa2-4086b07a19bb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.661777] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd9003-e378-4f5d-b282-783b20aaf32b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.674271] env[68279]: DEBUG nova.compute.provider_tree [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.177665] env[68279]: DEBUG nova.scheduler.client.report [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1473.684096] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.126s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.684389] env[68279]: INFO nova.compute.manager [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Migrating [ 1474.199251] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.199656] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.199656] env[68279]: DEBUG nova.network.neutron [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.898631] env[68279]: DEBUG nova.network.neutron [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.400931] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1476.915753] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da36478a-de0e-49d9-a9e2-3dc94c5cbf4d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.941966] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 0 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1477.448541] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.448859] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bfa21b0-d438-4c10-9b09-a9cb2af23956 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.456706] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1477.456706] env[68279]: value = "task-2964158" [ 1477.456706] env[68279]: _type = "Task" [ 1477.456706] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.465892] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.967048] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964158, 'name': PowerOffVM_Task, 'duration_secs': 0.178035} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.967477] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1477.967529] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 17 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1478.474968] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1478.474968] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.474968] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.475353] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.475353] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.475474] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1478.475684] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1478.475845] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1478.476022] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1478.476191] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1478.476379] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1478.481508] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d81ab8df-d32c-4b2b-9b12-3389560a67b3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.497991] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1478.497991] env[68279]: value = "task-2964159" [ 1478.497991] env[68279]: _type = "Task" [ 1478.497991] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.506346] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.008484] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964159, 'name': ReconfigVM_Task, 'duration_secs': 0.199663} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.009042] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 33 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1479.516068] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1479.516798] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1479.517118] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1479.517436] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1479.517739] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1479.518072] env[68279]: DEBUG nova.virt.hardware [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1479.523350] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1479.524020] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86b366a7-4eea-4ccd-b8fc-7949a1a375a8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.542194] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1479.542194] env[68279]: value = "task-2964160" [ 1479.542194] env[68279]: _type = "Task" [ 1479.542194] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.549690] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.052046] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964160, 'name': ReconfigVM_Task, 'duration_secs': 0.178867} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.052046] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1480.052735] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcecca14-c0cc-43bd-8a30-b01740dd10ee {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.076594] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.077152] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4feea09f-2f83-437e-bea3-1802b0663d56 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.093873] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1480.093873] env[68279]: value = "task-2964161" [ 1480.093873] env[68279]: _type = "Task" [ 1480.093873] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.101143] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964161, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.604017] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964161, 'name': ReconfigVM_Task, 'duration_secs': 0.285427} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.604286] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.604554] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 50 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1481.111429] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f896b187-16bb-4797-bda8-6593c60620be {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.133499] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c044aa2f-7460-4fda-94e6-15f860fd6767 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.152811] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 67 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1482.789258] env[68279]: DEBUG nova.network.neutron [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1483.811051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1483.811051] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1483.811498] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.845019] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.845353] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1484.845415] env[68279]: DEBUG nova.network.neutron [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.553679] env[68279]: DEBUG nova.network.neutron [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.056290] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1486.565475] env[68279]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e286a5d-dd2b-4d1f-90d0-4abdf8b367cd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.572921] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c619d4b-c67a-4616-ac05-90fb7bb46c47 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.672592] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27635db-c630-4e60-bbc0-4a2a63aea946 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.694228] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366a395a-f9a0-4ed4-9492-0cba7c66471b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.700916] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 83 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1488.206675] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1488.206882] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1da2f58-748a-4703-9de5-1f557c4b05d9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.214046] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1488.214046] env[68279]: value = "task-2964162" [ 1488.214046] env[68279]: _type = "Task" [ 1488.214046] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.221950] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.723748] env[68279]: DEBUG oslo_vmware.api [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964162, 'name': PowerOnVM_Task, 'duration_secs': 0.383758} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.724065] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.724280] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a131c0f7-9487-4472-8bab-ce20fd885b95 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d' progress to 100 {{(pid=68279) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1491.581366] env[68279]: DEBUG nova.network.neutron [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Port c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba binding to destination host cpu-1 is already ACTIVE {{(pid=68279) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1491.581660] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.581804] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1491.581969] env[68279]: DEBUG nova.network.neutron [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.302527] env[68279]: DEBUG nova.network.neutron [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.805581] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1493.309112] env[68279]: DEBUG nova.compute.manager [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68279) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1494.403427] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.403736] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.532508] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "c6071f0d-76f5-4415-b495-b0dbab00daca" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.532777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.532926] env[68279]: DEBUG nova.compute.manager [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1494.533828] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-0325d3b3-48e7-4db2-8700-3fbfbf5d5c97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.540720] env[68279]: DEBUG nova.compute.manager [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68279) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1494.541278] env[68279]: DEBUG nova.objects.instance [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'flavor' on Instance uuid c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1494.907025] env[68279]: DEBUG nova.objects.instance [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'migration_context' on Instance uuid b8d6f6fc-8bf6-46b2-8b35-94271c4e051d {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.465771] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c9c44f-1ecd-45a3-aa96-63b2050ffd49 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.475221] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62b411a-7ac5-4284-bc64-5cfbf935b736 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.516181] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c283d048-575e-41b3-9bae-93c593873735 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.523141] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677fa4e0-8101-452b-ac1c-60d4a3ce15f2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.535695] env[68279]: DEBUG nova.compute.provider_tree [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.548465] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1495.548675] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46609294-02f4-4191-8075-adc6d94bc40a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.554943] env[68279]: DEBUG oslo_vmware.api [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b 
tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1495.554943] env[68279]: value = "task-2964163" [ 1495.554943] env[68279]: _type = "Task" [ 1495.554943] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.562198] env[68279]: DEBUG oslo_vmware.api [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.039095] env[68279]: DEBUG nova.scheduler.client.report [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1496.065202] env[68279]: DEBUG oslo_vmware.api [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964163, 'name': PowerOffVM_Task, 'duration_secs': 0.216057} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.065480] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1496.065676] env[68279]: DEBUG nova.compute.manager [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1496.066427] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23972258-3dfe-4e54-9675-2251433e8486 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.578732] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6e4e6884-2df7-43e1-a3c9-2f353ebfc08b tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1496.890598] env[68279]: DEBUG nova.objects.instance [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'flavor' on Instance uuid 
c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1497.050421] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.647s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1497.395417] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.395628] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1497.395806] env[68279]: DEBUG nova.network.neutron [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1497.395983] env[68279]: DEBUG nova.objects.instance [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'info_cache' on Instance uuid c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1497.900071] env[68279]: DEBUG nova.objects.base [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1498.583405] env[68279]: INFO nova.compute.manager [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Swapping old allocation on dict_keys(['40ba16cf-8244-4715-b8c1-975029462ee4']) held by migration 168d9041-44b2-4b53-bc9f-009a5d20ab50 for instance [ 1498.604536] env[68279]: DEBUG nova.scheduler.client.report [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Overwriting current allocation {'allocations': {'40ba16cf-8244-4715-b8c1-975029462ee4': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 181}}, 'project_id': '7c1ad13d5de94b14ab00b7f003c1851d', 'user_id': 'e0f0f631a27f4d93bcc70956d721d9ba', 'consumer_generation': 1} on consumer b8d6f6fc-8bf6-46b2-8b35-94271c4e051d {{(pid=68279) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1498.674234] env[68279]: DEBUG nova.network.neutron [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 
tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [{"id": "a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.685220] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.685352] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1498.685549] env[68279]: DEBUG nova.network.neutron [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1499.178191] env[68279]: DEBUG oslo_concurrency.lockutils [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1499.383023] env[68279]: DEBUG nova.network.neutron [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [{"id": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "address": "fa:16:3e:61:f5:b3", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc458d54d-7a", "ovs_interfaceid": "c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.887047] env[68279]: DEBUG oslo_concurrency.lockutils [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1499.887263] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ee86b7-78b4-4ddf-8b86-356732eeaaa6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.894864] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17c5cd5-11cc-4925-a509-03690b8b00cc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.183625] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.184057] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-964ada9c-61e4-4ee8-b9a3-2555550da729 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.190740] env[68279]: DEBUG oslo_vmware.api [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1500.190740] env[68279]: value = "task-2964164" [ 1500.190740] env[68279]: _type = "Task" [ 1500.190740] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.198187] env[68279]: DEBUG oslo_vmware.api [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.701116] env[68279]: DEBUG oslo_vmware.api [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964164, 'name': PowerOnVM_Task, 'duration_secs': 0.354571} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.701370] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.701590] env[68279]: DEBUG nova.compute.manager [None req-81186650-1e7f-4afb-9430-a532fa1f4c5d tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1500.702301] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c3d6fe-787b-4e32-a464-eab81de7fd17 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.982827] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1500.982827] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5594e34f-b717-4b88-bf45-6c54dc68bc05 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.988568] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1500.988568] env[68279]: value = "task-2964165" [ 1500.988568] env[68279]: _type = "Task" [ 1500.988568] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.996406] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.498058] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964165, 'name': PowerOffVM_Task, 'duration_secs': 0.237149} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.498581] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.499028] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1501.499283] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.499398] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1501.499579] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.499725] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1501.499899] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1501.500136] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1501.500298] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1501.500460] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1501.500647] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1501.500837] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1501.505637] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edbf6492-621e-427d-ad91-0365153edc8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.519727] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1501.519727] env[68279]: value = "task-2964166" [ 1501.519727] env[68279]: _type = "Task" [ 1501.519727] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.527839] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.683440] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3470d354-4aef-46c2-9ccf-e0eaf67a620a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.689816] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Suspending the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1501.690042] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2d72ec6a-dd42-4e3a-b484-22a4642cba40 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.696217] env[68279]: DEBUG oslo_vmware.api [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1501.696217] env[68279]: value = "task-2964167" [ 1501.696217] env[68279]: _type = "Task" [ 1501.696217] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.703591] env[68279]: DEBUG oslo_vmware.api [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964167, 'name': SuspendVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.030018] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964166, 'name': ReconfigVM_Task, 'duration_secs': 0.147668} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.030942] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3184f0-6652-496c-8ba2-15a47a8dfc0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.052135] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1502.052394] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.052551] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1502.052733] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.052879] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1502.053037] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1502.053237] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1502.053396] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1502.053589] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1502.053794] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1502.054050] env[68279]: DEBUG nova.virt.hardware [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1502.054807] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adefa2fb-0044-40be-a37d-77d772aa426d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.059998] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1502.059998] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52610ee8-0b26-87cd-6b76-acf76b592ede" [ 1502.059998] env[68279]: _type = "Task" [ 1502.059998] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.067389] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52610ee8-0b26-87cd-6b76-acf76b592ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.205425] env[68279]: DEBUG oslo_vmware.api [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964167, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.570605] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52610ee8-0b26-87cd-6b76-acf76b592ede, 'name': SearchDatastore_Task, 'duration_secs': 0.009215} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.575843] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1502.576116] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62d8be6e-67cb-49a9-85f3-c10f42347501 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.593507] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1502.593507] env[68279]: value = "task-2964168" [ 1502.593507] env[68279]: _type = "Task" [ 1502.593507] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.601017] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964168, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.705414] env[68279]: DEBUG oslo_vmware.api [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964167, 'name': SuspendVM_Task, 'duration_secs': 0.541887} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.706455] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Suspended the VM {{(pid=68279) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1502.706455] env[68279]: DEBUG nova.compute.manager [None req-a6399e5a-4653-4105-9e25-b405d028fff1 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1502.706826] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bad8cf-48ba-463a-ad0a-d09fa3c75279 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.104415] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964168, 'name': ReconfigVM_Task, 'duration_secs': 0.170835} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.104415] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1503.104881] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c28062a-ad88-4e6a-b997-5df44fdfd12b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.129007] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1503.129269] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-469f6eb6-5376-4f02-9c48-05116539beca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.146530] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1503.146530] env[68279]: value = "task-2964169" [ 1503.146530] env[68279]: _type = "Task" [ 1503.146530] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.155226] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.656294] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964169, 'name': ReconfigVM_Task, 'duration_secs': 0.296428} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.656726] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to attach disk [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d/b8d6f6fc-8bf6-46b2-8b35-94271c4e051d.vmdk or device None with type thin {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1503.657814] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c31400-0d20-4ff9-8e74-d8e05cd6ddda {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.677693] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e16035-41d8-4690-b9c8-2b246bb36230 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.697202] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128a6f32-ed81-4ace-a333-91ebd4a68f72 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.716625] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a8ef33-2fa2-49c4-a9e7-3dba51718de8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.722788] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1503.723017] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-393b69f8-f757-40e5-8d2d-7fa4227058c5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.728752] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1503.728752] env[68279]: value = "task-2964170" [ 1503.728752] env[68279]: _type = "Task" [ 1503.728752] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.735926] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.997427] env[68279]: INFO nova.compute.manager [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Resuming [ 1503.998077] env[68279]: DEBUG nova.objects.instance [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'flavor' on Instance uuid c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.238138] env[68279]: DEBUG oslo_vmware.api [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964170, 'name': PowerOnVM_Task, 'duration_secs': 0.42422} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.238419] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1505.280275] env[68279]: INFO nova.compute.manager [None req-6b3fb6ec-019c-4e9d-aad7-7c112645a3c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance to original state: 'active' [ 1505.508714] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.508881] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquired lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1505.509034] env[68279]: DEBUG nova.network.neutron [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1506.200087] env[68279]: DEBUG nova.network.neutron [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [{"id": 
"a32389b4-23aa-4395-99b6-f055ab5890c5", "address": "fa:16:3e:f2:00:65", "network": {"id": "6ed1af08-7a04-419c-ada2-3d38434ef016", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-789514676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d7a90a39b864e3e985b3b828c3fd363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa32389b4-23", "ovs_interfaceid": "a32389b4-23aa-4395-99b6-f055ab5890c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.702731] env[68279]: DEBUG oslo_concurrency.lockutils [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Releasing lock "refresh_cache-c6071f0d-76f5-4415-b495-b0dbab00daca" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1506.703888] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdd5585-813a-4ae1-acae-77503d7a1cd6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.712184] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Resuming the VM {{(pid=68279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1506.712450] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f254f2e0-6e44-4c8b-95aa-551ec92c4d2b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.719571] env[68279]: DEBUG oslo_vmware.api [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1506.719571] env[68279]: value = "task-2964171" [ 1506.719571] env[68279]: _type = "Task" [ 1506.719571] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.727288] env[68279]: DEBUG oslo_vmware.api [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964171, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.196691] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.196973] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.197305] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.197515] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.197724] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.200656] env[68279]: INFO nova.compute.manager [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Terminating instance [ 1507.232743] env[68279]: DEBUG oslo_vmware.api [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964171, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.704808] env[68279]: DEBUG nova.compute.manager [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Start destroying the instance on the hypervisor. 
{{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1507.705223] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1507.705483] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-146bdfa0-a3c3-415b-9fcd-3a457e47f807 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.712749] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1507.712749] env[68279]: value = "task-2964172" [ 1507.712749] env[68279]: _type = "Task" [ 1507.712749] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.722441] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.731144] env[68279]: DEBUG oslo_vmware.api [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964171, 'name': PowerOnVM_Task, 'duration_secs': 0.535767} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.731380] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Resumed the VM {{(pid=68279) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1507.731553] env[68279]: DEBUG nova.compute.manager [None req-56beffc9-12f3-4e96-b569-d7587fd72bd9 tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1507.732279] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c1487e-cfca-4ca1-ab78-ef04dd68be76 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.223099] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964172, 'name': PowerOffVM_Task, 'duration_secs': 0.276588} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.223099] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1508.223298] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Volume detach. Driver type: vmdk {{(pid=68279) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1508.223366] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594782', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'name': 'volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d', 'attached_at': '2025-03-12T08:58:33.000000', 'detached_at': '', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'serial': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1508.224079] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825ceb6a-4729-429c-960e-971eb7f4bd60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.246209] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141b3155-4dd9-47bd-89b4-92777f393884 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.253317] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478501d6-de21-42f4-ab27-36cf9566aa03 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.274863] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9d16c2-b2f6-4ddb-95e2-c890feee17bf {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.289730] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] The volume has not been displaced from its original location: [datastore1] volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155/volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155.vmdk. No consolidation needed. 
{{(pid=68279) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1508.295013] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1508.295494] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03f43b55-75d9-4ff8-924a-05d92464a5c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.314960] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1508.314960] env[68279]: value = "task-2964173" [ 1508.314960] env[68279]: _type = "Task" [ 1508.314960] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.322095] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964173, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.631473] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "c6071f0d-76f5-4415-b495-b0dbab00daca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.631803] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.632074] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.632277] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.632452] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a 
tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1508.634549] env[68279]: INFO nova.compute.manager [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Terminating instance [ 1508.824904] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964173, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.138708] env[68279]: DEBUG nova.compute.manager [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1509.138935] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.139838] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed841d07-db65-4bb9-a6c1-4b1648f831e3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.147577] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.147779] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfae4a5b-f2b1-4440-afdb-cbae9e173f8c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.155028] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1509.155028] env[68279]: value = "task-2964174" [ 1509.155028] env[68279]: _type = "Task" [ 1509.155028] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.162690] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964174, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.325191] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964173, 'name': ReconfigVM_Task, 'duration_secs': 0.563509} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.325514] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=68279) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1509.330177] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55d6c0ab-9df9-456d-a166-66087e1225ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.345256] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1509.345256] env[68279]: value = "task-2964175" [ 1509.345256] env[68279]: _type = "Task" [ 1509.345256] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.352823] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.664332] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.854981] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964175, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.164761] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964174, 'name': PowerOffVM_Task, 'duration_secs': 0.819184} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.165324] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1510.166048] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1510.166048] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13b1c3f6-191b-432d-b722-b80ce84fd362 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.224296] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.224526] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.224682] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleting the datastore file [datastore1] c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.224933] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1f30c7c-b28b-4cb8-97c3-3103b8da298b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.230763] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for the task: (returnval){ [ 1510.230763] env[68279]: value = "task-2964177" [ 1510.230763] env[68279]: _type = "Task" [ 1510.230763] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.237978] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964177, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.356227] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964175, 'name': ReconfigVM_Task, 'duration_secs': 0.592684} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.356568] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594782', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'name': 'volume-9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'b8d6f6fc-8bf6-46b2-8b35-94271c4e051d', 'attached_at': '2025-03-12T08:58:33.000000', 'detached_at': '', 'volume_id': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155', 'serial': '9da9b7a5-cad2-4a0e-bb32-c98453ae0155'} {{(pid=68279) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1510.356786] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1510.357543] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c5bab3-cda7-4c54-a4ab-8c96ed8575ec {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.364339] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1510.364557] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db34a91a-ae9e-48fb-bd65-939575488a93 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.418982] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.419250] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Deleting contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.419391] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] Deleting the datastore file [datastore2] b8d6f6fc-8bf6-46b2-8b35-94271c4e051d {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.419644] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ae9099f-4c60-487d-9432-ba31d22912ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.425667] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1510.425667] env[68279]: value = "task-2964179" [ 1510.425667] env[68279]: _type = "Task" [ 1510.425667] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.433027] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.739972] env[68279]: DEBUG oslo_vmware.api [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Task: {'id': task-2964177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133609} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.740255] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.740428] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.740639] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.740777] env[68279]: INFO nova.compute.manager [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1510.741091] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1510.741290] env[68279]: DEBUG nova.compute.manager [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1510.741380] env[68279]: DEBUG nova.network.neutron [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.936045] env[68279]: DEBUG oslo_vmware.api [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140958} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.936479] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.936479] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Deleted contents of the VM from datastore datastore2 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.936640] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.936841] env[68279]: INFO nova.compute.manager [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Took 3.23 seconds to destroy the instance on the hypervisor. [ 1510.937076] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1510.937276] env[68279]: DEBUG nova.compute.manager [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1510.937374] env[68279]: DEBUG nova.network.neutron [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1511.196133] env[68279]: DEBUG nova.compute.manager [req-beee8435-d905-4a14-972c-f40ef290787f req-de22ed40-d590-4bc9-a113-052001436c5f service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Received event network-vif-deleted-a32389b4-23aa-4395-99b6-f055ab5890c5 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1511.196391] env[68279]: INFO nova.compute.manager [req-beee8435-d905-4a14-972c-f40ef290787f req-de22ed40-d590-4bc9-a113-052001436c5f service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Neutron deleted interface a32389b4-23aa-4395-99b6-f055ab5890c5; detaching it from the instance and deleting it from the info cache [ 1511.196598] env[68279]: DEBUG nova.network.neutron [req-beee8435-d905-4a14-972c-f40ef290787f req-de22ed40-d590-4bc9-a113-052001436c5f service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.405920] env[68279]: DEBUG nova.compute.manager [req-74a72d78-e78f-4ea7-8b3a-a7cf1288638e req-5d6b9a8a-7b7f-48da-a664-30bfbc017ca9 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Received event network-vif-deleted-c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1511.406145] env[68279]: INFO nova.compute.manager [req-74a72d78-e78f-4ea7-8b3a-a7cf1288638e req-5d6b9a8a-7b7f-48da-a664-30bfbc017ca9 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Neutron deleted interface c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba; detaching it from the instance and deleting it from the info cache [ 1511.406324] env[68279]: DEBUG nova.network.neutron [req-74a72d78-e78f-4ea7-8b3a-a7cf1288638e req-5d6b9a8a-7b7f-48da-a664-30bfbc017ca9 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.677091] env[68279]: DEBUG nova.network.neutron [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.704029] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be4704f4-4802-4105-9248-c500cc447183 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.713937] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bfeb83-c9d1-4775-bbfc-ebc0962bbc60 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.738901] env[68279]: DEBUG nova.compute.manager [req-beee8435-d905-4a14-972c-f40ef290787f req-de22ed40-d590-4bc9-a113-052001436c5f 
service nova] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Detach interface failed, port_id=a32389b4-23aa-4395-99b6-f055ab5890c5, reason: Instance c6071f0d-76f5-4415-b495-b0dbab00daca could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1511.890753] env[68279]: DEBUG nova.network.neutron [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.908793] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ad914b8-ea8d-4f28-bc2c-229016e7e8c0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.918674] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e028f8c2-c26f-4f98-82c8-eeab2125482a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.948593] env[68279]: DEBUG nova.compute.manager [req-74a72d78-e78f-4ea7-8b3a-a7cf1288638e req-5d6b9a8a-7b7f-48da-a664-30bfbc017ca9 service nova] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Detach interface failed, port_id=c458d54d-7a7c-4a13-a5fa-b0ca0b4938ba, reason: Instance b8d6f6fc-8bf6-46b2-8b35-94271c4e051d could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1512.180341] env[68279]: INFO nova.compute.manager [-] [instance: c6071f0d-76f5-4415-b495-b0dbab00daca] Took 1.44 seconds to deallocate network for instance. [ 1512.393324] env[68279]: INFO nova.compute.manager [-] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Took 1.46 seconds to deallocate network for instance. [ 1512.687266] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1512.687798] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1512.688085] env[68279]: DEBUG nova.objects.instance [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lazy-loading 'resources' on Instance uuid c6071f0d-76f5-4415-b495-b0dbab00daca {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1512.944636] env[68279]: INFO nova.compute.manager [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: b8d6f6fc-8bf6-46b2-8b35-94271c4e051d] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1513.237430] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314de334-1906-4c6e-8498-4dd1d6ee6ec2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.245178] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6274f44-5152-419a-92e9-84601493f21e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.275179] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec20b1b-47a5-4eaf-b87e-790236c30480 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.282565] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3135405e-86c1-49e9-ba5f-bca870be4087 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.295956] env[68279]: DEBUG nova.compute.provider_tree [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.452209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1513.799290] env[68279]: DEBUG nova.scheduler.client.report [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1514.304462] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1514.307216] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.855s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1514.307558] env[68279]: DEBUG oslo_concurrency.lockutils [None 
req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1514.322679] env[68279]: INFO nova.scheduler.client.report [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Deleted allocations for instance c6071f0d-76f5-4415-b495-b0dbab00daca [ 1514.324418] env[68279]: INFO nova.scheduler.client.report [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted allocations for instance b8d6f6fc-8bf6-46b2-8b35-94271c4e051d [ 1514.834170] env[68279]: DEBUG oslo_concurrency.lockutils [None req-32b93b57-c101-4807-9fc3-ded4e961d92a tempest-ServerActionsTestJSON-369211313 tempest-ServerActionsTestJSON-369211313-project-member] Lock "c6071f0d-76f5-4415-b495-b0dbab00daca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.202s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1514.835506] env[68279]: DEBUG oslo_concurrency.lockutils [None req-8def9b57-6db5-448b-9af3-ccac33c2c3a3 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "b8d6f6fc-8bf6-46b2-8b35-94271c4e051d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.638s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1515.482013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1515.482535] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1515.984981] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Starting instance... 
{{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1516.511227] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.511494] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.513018] env[68279]: INFO nova.compute.claims [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1517.558420] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab5bbf6-1461-4a1c-b029-a572f9da587d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.566451] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4265ac-72f9-4c48-a346-d510c4c4b468 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.596019] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90f5f09-f367-4c7c-aa36-cc9cceeb02df {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.602841] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f11ee83-df86-464a-b4e4-86b94929d036 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.616924] env[68279]: DEBUG nova.compute.provider_tree [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.819960] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1517.820230] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1518.120307] env[68279]: DEBUG nova.scheduler.client.report [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1518.322550] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Starting instance... {{(pid=68279) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1518.625650] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.114s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1518.626193] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1518.840994] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1518.841277] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1518.842704] env[68279]: INFO nova.compute.claims [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.131832] env[68279]: DEBUG nova.compute.utils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1519.133268] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Allocating IP information in the background. 
{{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1519.133400] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] allocate_for_instance() {{(pid=68279) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1519.181332] env[68279]: DEBUG nova.policy [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0f0f631a27f4d93bcc70956d721d9ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1ad13d5de94b14ab00b7f003c1851d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68279) authorize /opt/stack/nova/nova/policy.py:192}} [ 1519.442730] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Successfully created port: 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1519.636919] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1519.887127] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6ea9fe-62c9-4310-a5a1-2004c112fc6e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.894793] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe09c582-ec98-4228-833d-71d7b509a1da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.926059] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50b6580-3125-4f80-acde-ee693d1af7b6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.933894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a661bc55-5f5a-40c2-8159-d21f98abf3fd {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.946913] env[68279]: DEBUG nova.compute.provider_tree [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1520.240867] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.240999] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241156] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241309] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241457] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241598] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241739] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.241886] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1520.242072] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.450214] env[68279]: DEBUG nova.scheduler.client.report [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1520.647110] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Start spawning the instance on the hypervisor. {{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1520.674146] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1520.674415] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1520.674574] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1520.674753] env[68279]: DEBUG nova.virt.hardware [None 
req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1520.674934] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1520.675113] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1520.675321] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1520.675472] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1520.675637] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1520.675796] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1520.675969] env[68279]: DEBUG nova.virt.hardware [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1520.676834] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743aeafc-0afa-4abd-bd17-9154f44bb350 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.684839] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87fde72-23cf-4624-a45e-51f681f04458 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.745240] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1520.806895] env[68279]: DEBUG nova.compute.manager [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1520.807209] env[68279]: DEBUG oslo_concurrency.lockutils [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1520.807410] env[68279]: DEBUG oslo_concurrency.lockutils [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1520.807581] env[68279]: DEBUG oslo_concurrency.lockutils [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1520.807821] env[68279]: DEBUG nova.compute.manager [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] No waiting events found dispatching network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1520.807999] env[68279]: WARNING nova.compute.manager [req-188d3ae5-2f2a-4154-9a9e-87b785bd58d3 req-5a79a652-afba-4bc8-8862-e57b2ad8740c service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received unexpected event network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 for instance with vm_state building and task_state spawning. [ 1520.859821] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Successfully updated port: 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.954949] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1520.955462] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Start building networks asynchronously for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1520.957955] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.213s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1520.958175] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1520.958362] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1520.959548] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56d6ae2-009e-43bc-b65b-c97a8ea91199 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.968915] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f1853d-0ee7-4554-b8d0-ae7c897de829 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.982373] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959065a3-ed68-4142-9e36-e8bd33dac206 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.988600] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad22a36-1897-4bd0-91ba-c362e9be05b8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.016630] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180928MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1521.016777] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1521.016963] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1521.362808] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.362860] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1521.362991] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1521.460169] env[68279]: DEBUG nova.compute.utils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Using /dev/sd instead of None {{(pid=68279) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1521.462443] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Not allocating networking since 'none' was specified. {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1521.892719] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.962736] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Start building block device mappings for instance. 
{{(pid=68279) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1522.020196] env[68279]: DEBUG nova.network.neutron [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.039053] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1522.039216] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance ee6a719a-f353-47d3-ae56-5a3241b618cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1522.039392] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1522.039530] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1522.072474] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d75b1cd-ffb1-4817-861a-24e6e781316a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.081213] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4748d4-047d-4990-86c2-9c38774bdbf2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.110155] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f17f633-8b04-480f-bf28-06af185c4f0f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.116751] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb3caa1-5354-4262-83e2-635c4fe85f01 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.129226] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.526013] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1522.526013] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance network_info: |[{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68279) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1522.526013] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:80:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '120855ec-e8a4-4d63-b680-0a3a546fff36', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1522.531738] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1522.532152] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1522.532518] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0779ed4a-2776-44ef-8505-af11bd8f31a2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.552530] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1522.552530] env[68279]: value = "task-2964180" [ 1522.552530] env[68279]: _type = "Task" [ 1522.552530] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.562568] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964180, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.632354] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1522.831905] env[68279]: DEBUG nova.compute.manager [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1522.832123] env[68279]: DEBUG nova.compute.manager [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing instance network info cache due to event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1522.832334] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.832476] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1522.832633] env[68279]: DEBUG nova.network.neutron [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1522.971327] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Start spawning the instance on the hypervisor. 
{{(pid=68279) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1522.998084] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:41:26Z,direct_url=,disk_format='vmdk',id=01e502b7-2447-4972-9fe7-fd69f76ef71f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c7a4facdfe194e1cab42f5a1979bf666',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:41:27Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1522.998333] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.998489] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1522.998666] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.998841] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1522.998996] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1522.999213] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1522.999369] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1522.999531] env[68279]: DEBUG nova.virt.hardware [None 
req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1522.999690] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1522.999857] env[68279]: DEBUG nova.virt.hardware [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1523.000724] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75731cbe-3535-4194-b3d3-67a043d0d502 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.008327] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a808daa-300a-41bc-afc0-b3605eaee7fe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.021207] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Instance VIF info [] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1523.026468] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Creating folder: Project (d6725cb790654ef4b5cb6aa3446e88aa). Parent ref: group-v594445. {{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.026776] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b293b81c-a833-4d96-b17b-4b2832981864 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.037254] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Created folder: Project (d6725cb790654ef4b5cb6aa3446e88aa) in parent group-v594445. [ 1523.037424] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Creating folder: Instances. Parent ref: group-v594784. 
{{(pid=68279) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.037616] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05c367da-d5f7-4e4c-a770-75d0e62bab34 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.045949] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Created folder: Instances in parent group-v594784. [ 1523.046176] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1523.046354] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1523.046530] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21b25b72-04ec-49bf-98e3-76c73edcc4ba {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.064935] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964180, 'name': CreateVM_Task, 'duration_secs': 0.291736} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.065898] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1523.066101] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1523.066101] env[68279]: value = "task-2964183" [ 1523.066101] env[68279]: _type = "Task" [ 1523.066101] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.066698] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.066854] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.067189] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1523.067440] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7a7a623-f70b-4c5d-96c7-4ce6a494d198 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.073922] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1523.073922] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523dd482-05c8-4e63-30af-328a23904b78" [ 1523.073922] env[68279]: _type = "Task" [ 1523.073922] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.077049] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964183, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.084072] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523dd482-05c8-4e63-30af-328a23904b78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.137903] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1523.138168] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1523.514430] env[68279]: DEBUG nova.network.neutron [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updated VIF entry in instance network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.514803] env[68279]: DEBUG nova.network.neutron [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.576341] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964183, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.585290] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]523dd482-05c8-4e63-30af-328a23904b78, 'name': SearchDatastore_Task, 'duration_secs': 0.012088} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.585563] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1523.585828] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1523.586124] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.586205] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.586366] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.586598] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-789592f0-300c-4e8c-aa36-5fe5800cd670 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.594500] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.594663] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1523.595337] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a057a867-dba9-4d55-9967-f4e3ef2271da {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.600105] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1523.600105] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f51bd8-1559-ba78-02bb-27381706c04c" [ 1523.600105] env[68279]: _type = "Task" [ 1523.600105] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.608178] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f51bd8-1559-ba78-02bb-27381706c04c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.018121] env[68279]: DEBUG oslo_concurrency.lockutils [req-ea081afc-3991-44ee-9dbc-3ef41f197ae4 req-26424a69-cc5a-49e4-b8eb-dbeca8bc7be0 service nova] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1524.076755] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964183, 'name': CreateVM_Task} progress is 99%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.109239] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52f51bd8-1559-ba78-02bb-27381706c04c, 'name': SearchDatastore_Task, 'duration_secs': 0.008893} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.109958] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1fe5cad-f149-435e-a353-ec425af177f8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.114750] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1524.114750] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e23a98-4e33-85ec-b0f1-d32d10d26b8d" [ 1524.114750] env[68279]: _type = "Task" [ 1524.114750] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.122193] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e23a98-4e33-85ec-b0f1-d32d10d26b8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.578732] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964183, 'name': CreateVM_Task, 'duration_secs': 1.245933} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.579010] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1524.579295] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.579458] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1524.579780] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1524.580024] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79135066-9891-4b78-aa18-3e5a71189782 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.585076] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1524.585076] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e7546e-5924-4945-abe1-95d48acf1c8e" [ 1524.585076] env[68279]: _type = "Task" [ 1524.585076] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.592531] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e7546e-5924-4945-abe1-95d48acf1c8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.623494] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e23a98-4e33-85ec-b0f1-d32d10d26b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009988} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.623723] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1524.624040] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1524.624291] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dac25803-f957-479a-bd8e-b03f164f5f14 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.630616] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1524.630616] env[68279]: value = "task-2964184" [ 1524.630616] env[68279]: _type = "Task" [ 1524.630616] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.637534] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.061344] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.094777] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52e7546e-5924-4945-abe1-95d48acf1c8e, 'name': SearchDatastore_Task, 'duration_secs': 0.010674} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.095088] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1525.095320] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Processing image 01e502b7-2447-4972-9fe7-fd69f76ef71f {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1525.095580] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.095751] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1525.095920] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1525.096187] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdd6b091-06f8-43ef-8510-cae108ddca6a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.106963] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1525.107229] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1525.107936] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f4e94f-bc6c-4ae3-adc2-b3884d7d9971 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.113288] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1525.113288] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f1e9-8f38-09a6-3db3-7efddb6ed1d8" [ 1525.113288] env[68279]: _type = "Task" [ 1525.113288] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.120961] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f1e9-8f38-09a6-3db3-7efddb6ed1d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.138549] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471074} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.138677] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1525.138806] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1525.139369] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a047960c-f979-4bf7-889c-a18f3e2eb85b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.145576] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1525.145576] env[68279]: value = "task-2964185" [ 1525.145576] env[68279]: _type = "Task" [ 1525.145576] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.152514] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.623333] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5233f1e9-8f38-09a6-3db3-7efddb6ed1d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.624092] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48b698d7-9909-4f45-8367-25f7a501c9d6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.628807] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1525.628807] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cfd8bd-3c30-6401-ac28-905e6233d5e2" [ 1525.628807] env[68279]: _type = "Task" [ 1525.628807] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.635768] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cfd8bd-3c30-6401-ac28-905e6233d5e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.652681] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065608} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.652911] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1525.653616] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d09a451-c0d7-4a2a-9a1b-9c78f6bd296c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.674493] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1525.674719] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c391850-d99a-4aaf-9ec3-b57b6e842c2e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.692637] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1525.692637] env[68279]: value = "task-2964186" [ 1525.692637] env[68279]: _type = "Task" [ 1525.692637] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.700885] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.138503] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52cfd8bd-3c30-6401-ac28-905e6233d5e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009248} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.138873] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1526.139013] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] ee6a719a-f353-47d3-ae56-5a3241b618cc/ee6a719a-f353-47d3-ae56-5a3241b618cc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1526.139266] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4fda266-49c1-4549-b904-f6df2dcc1d80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.145448] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1526.145448] env[68279]: value = "task-2964187" [ 1526.145448] env[68279]: _type = "Task" [ 1526.145448] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.153325] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.201797] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964186, 'name': ReconfigVM_Task, 'duration_secs': 0.282504} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.202082] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Reconfigured VM instance instance-0000007e to attach disk [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1526.202696] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1435fc89-609b-466c-9afe-4d6c6e722a41 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.208532] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1526.208532] env[68279]: value = "task-2964188" [ 1526.208532] env[68279]: _type = "Task" [ 1526.208532] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.215666] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964188, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.655639] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477128} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.655987] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01e502b7-2447-4972-9fe7-fd69f76ef71f/01e502b7-2447-4972-9fe7-fd69f76ef71f.vmdk to [datastore1] ee6a719a-f353-47d3-ae56-5a3241b618cc/ee6a719a-f353-47d3-ae56-5a3241b618cc.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1526.656256] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Extending root virtual disk to 1048576 {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1526.656561] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48bd701b-4c26-44ae-953c-fc5d5f221f9d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.663350] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1526.663350] env[68279]: value = "task-2964189" [ 1526.663350] env[68279]: _type = "Task" [ 1526.663350] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.671215] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964189, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.718179] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964188, 'name': Rename_Task, 'duration_secs': 0.157806} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.718446] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1526.718682] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00b2704c-25c1-443d-bf63-8ea6a9704734 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.725265] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1526.725265] env[68279]: value = "task-2964190" [ 1526.725265] env[68279]: _type = "Task" [ 1526.725265] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.733350] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.172930] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068881} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.173223] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Extended root virtual disk {{(pid=68279) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1527.173987] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e527ad-9af2-473c-96e2-dde5d004b691 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.193153] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] ee6a719a-f353-47d3-ae56-5a3241b618cc/ee6a719a-f353-47d3-ae56-5a3241b618cc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1527.193401] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81f661bd-6690-474f-ab79-33aa0257fb0d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.214753] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1527.214753] env[68279]: value = "task-2964191" [ 1527.214753] env[68279]: _type = "Task" [ 1527.214753] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.222975] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.234175] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964190, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.725215] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.734442] env[68279]: DEBUG oslo_vmware.api [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964190, 'name': PowerOnVM_Task, 'duration_secs': 0.510929} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.734720] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1527.734939] env[68279]: INFO nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Took 7.09 seconds to spawn the instance on the hypervisor. [ 1527.735152] env[68279]: DEBUG nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1527.735967] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23d9a5f-3db2-43d7-ace6-001cc146730b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.226194] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964191, 'name': ReconfigVM_Task, 'duration_secs': 0.61052} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.226534] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Reconfigured VM instance instance-0000007f to attach disk [datastore1] ee6a719a-f353-47d3-ae56-5a3241b618cc/ee6a719a-f353-47d3-ae56-5a3241b618cc.vmdk or device None with type sparse {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1528.227084] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e212075-ff58-48c3-9f0e-2bc6403ced9e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.233116] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1528.233116] env[68279]: value = "task-2964192" [ 1528.233116] env[68279]: _type = "Task" [ 1528.233116] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.240573] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964192, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.253105] env[68279]: INFO nova.compute.manager [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Took 11.76 seconds to build instance. [ 1528.720974] env[68279]: DEBUG nova.compute.manager [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1528.721187] env[68279]: DEBUG nova.compute.manager [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing instance network info cache due to event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36. 
{{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1528.721410] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.721927] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1528.721927] env[68279]: DEBUG nova.network.neutron [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1528.743023] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964192, 'name': Rename_Task, 'duration_secs': 0.141046} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.743280] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1528.743518] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4837a6c-fd3d-4d33-8fba-80921abf9185 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.749292] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1528.749292] env[68279]: value = "task-2964193" [ 1528.749292] env[68279]: _type = "Task" [ 1528.749292] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.756744] env[68279]: DEBUG oslo_concurrency.lockutils [None req-3b044e64-265d-4f50-87f4-46b7f3c9a701 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.274s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1528.757220] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.258681] env[68279]: DEBUG oslo_vmware.api [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964193, 'name': PowerOnVM_Task, 'duration_secs': 0.507886} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.259079] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1529.259174] env[68279]: INFO nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Took 6.29 seconds to spawn the instance on the hypervisor. [ 1529.259348] env[68279]: DEBUG nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1529.260164] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923a1220-83ce-4649-bf80-59987cde3202 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.435388] env[68279]: DEBUG nova.network.neutron [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updated VIF entry in instance network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.435748] env[68279]: DEBUG nova.network.neutron [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.779944] env[68279]: INFO nova.compute.manager [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Took 10.95 seconds to build instance. 
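The update_instance_cache_with_nw_info entry above shows the network_info model Nova caches for instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a: a list with one VIF dict per Neutron port, each carrying the port id, MAC address, the subnet with its fixed IP and any attached floating IPs, plus the OVS binding details. A minimal sketch of walking that structure follows; the vif literal is abridged from the logged cache entry and the helper name summarize_vif is illustrative, not a Nova API.

# Illustrative only: walks one cached network_info VIF entry like the one logged above.
vif = {
    "id": "120855ec-e8a4-4d63-b680-0a3a546fff36",
    "address": "fa:16:3e:36:80:e0",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.3",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.131", "type": "floating"}],
            }],
        }],
    },
    "type": "ovs",
    "devname": "tap120855ec-e8",
}

def summarize_vif(vif):
    """Return (port_id, fixed_ips, floating_ips) for one cached VIF dict."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet.get("ips", []):
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return vif["id"], fixed, floating

print(summarize_vif(vif))
# ('120855ec-e8a4-4d63-b680-0a3a546fff36', ['192.168.128.3'], ['10.180.180.131'])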
[ 1529.939224] env[68279]: DEBUG oslo_concurrency.lockutils [req-b2887a28-4edd-4a58-835d-27b9f59fbd86 req-bf6766f0-7b21-4643-ae9d-c8271df512a6 service nova] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1530.282160] env[68279]: DEBUG oslo_concurrency.lockutils [None req-31b9c497-7cba-4bbd-8e14-ddb28adb29fe tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.462s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1530.875779] env[68279]: DEBUG nova.compute.manager [None req-7c531ddb-2dad-45dc-af4b-bd9b946f004a tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1530.876992] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106aeda4-3738-4b4e-bde4-df64a60378ed {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.936802] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1530.937093] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1530.937309] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "ee6a719a-f353-47d3-ae56-5a3241b618cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1530.937491] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1530.937661] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1530.939726] env[68279]: INFO nova.compute.manager [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Terminating instance [ 1531.390488] env[68279]: INFO nova.compute.manager [None req-7c531ddb-2dad-45dc-af4b-bd9b946f004a tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] instance snapshotting [ 1531.391122] env[68279]: DEBUG nova.objects.instance [None req-7c531ddb-2dad-45dc-af4b-bd9b946f004a tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lazy-loading 'flavor' on Instance uuid ee6a719a-f353-47d3-ae56-5a3241b618cc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1531.443361] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "refresh_cache-ee6a719a-f353-47d3-ae56-5a3241b618cc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.443532] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquired lock "refresh_cache-ee6a719a-f353-47d3-ae56-5a3241b618cc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1531.443705] env[68279]: DEBUG nova.network.neutron [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.896886] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c71796f-1721-4032-89b7-6780a5587584 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.913983] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48bf18f-ba3a-4bef-b530-413cff3221e5 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.962123] env[68279]: DEBUG nova.network.neutron [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Instance cache missing network info. 
{{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1532.015979] env[68279]: DEBUG nova.network.neutron [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.424325] env[68279]: DEBUG nova.compute.manager [None req-7c531ddb-2dad-45dc-af4b-bd9b946f004a tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Instance disappeared during snapshot {{(pid=68279) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1532.518359] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Releasing lock "refresh_cache-ee6a719a-f353-47d3-ae56-5a3241b618cc" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1532.518767] env[68279]: DEBUG nova.compute.manager [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1532.518970] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1532.519841] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822032bf-abb1-484f-8c53-de3164e0e956 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.527418] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1532.527649] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-095ba7b0-1313-4ceb-8058-8a30070cfbc7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.533522] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1532.533522] env[68279]: value = "task-2964194" [ 1532.533522] env[68279]: _type = "Task" [ 1532.533522] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.541203] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.649490] env[68279]: DEBUG nova.compute.manager [None req-7c531ddb-2dad-45dc-af4b-bd9b946f004a tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Found 0 images (rotation: 2) {{(pid=68279) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1533.043441] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964194, 'name': PowerOffVM_Task, 'duration_secs': 0.117887} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.043694] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1533.043821] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1533.044084] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a4802aa-595c-45ce-8993-d00248c220e4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.069559] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1533.069789] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1533.069921] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Deleting the datastore file [datastore1] ee6a719a-f353-47d3-ae56-5a3241b618cc {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1533.070191] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f18ee7de-4c41-4241-bd1f-6ca4d3494277 {{(pid=68279) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.076222] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for the task: (returnval){ [ 1533.076222] env[68279]: value = "task-2964196" [ 1533.076222] env[68279]: _type = "Task" [ 1533.076222] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.084360] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.586069] env[68279]: DEBUG oslo_vmware.api [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Task: {'id': task-2964196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104747} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.586455] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1533.586455] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1533.586547] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1533.586684] env[68279]: INFO nova.compute.manager [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1533.586931] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1533.587135] env[68279]: DEBUG nova.compute.manager [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1533.587229] env[68279]: DEBUG nova.network.neutron [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1533.602524] env[68279]: DEBUG nova.network.neutron [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Instance cache missing network info. {{(pid=68279) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1534.105276] env[68279]: DEBUG nova.network.neutron [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.608323] env[68279]: INFO nova.compute.manager [-] [instance: ee6a719a-f353-47d3-ae56-5a3241b618cc] Took 1.02 seconds to deallocate network for instance. [ 1535.115559] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1535.115947] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1535.116269] env[68279]: DEBUG nova.objects.instance [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lazy-loading 'resources' on Instance uuid ee6a719a-f353-47d3-ae56-5a3241b618cc {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1535.661590] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b86e0d4-e976-46f2-b816-b46d30f304c6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.669142] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aafc40e-4334-415d-abac-8ba12a408db0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.698569] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532e5d2b-c2e2-46fa-812c-aacfaac7ed29 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.705201] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad544a44-c2f8-4f1b-8ef2-ca7fa0c3b7dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.718578] env[68279]: DEBUG nova.compute.provider_tree [None 
req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.221557] env[68279]: DEBUG nova.scheduler.client.report [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1536.727611] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1536.746558] env[68279]: INFO nova.scheduler.client.report [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Deleted allocations for instance ee6a719a-f353-47d3-ae56-5a3241b618cc [ 1537.256124] env[68279]: DEBUG oslo_concurrency.lockutils [None req-b07a8019-afea-4631-9f76-5d2d6c2024e6 tempest-ServersAaction247Test-1284479713 tempest-ServersAaction247Test-1284479713-project-member] Lock "ee6a719a-f353-47d3-ae56-5a3241b618cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.319s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1566.529312] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1566.529748] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1566.529748] env[68279]: INFO nova.compute.manager [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Shelving [ 1567.539462] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 
tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1567.539829] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1810709-ad84-4bd0-9268-868151371963 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.548058] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1567.548058] env[68279]: value = "task-2964197" [ 1567.548058] env[68279]: _type = "Task" [ 1567.548058] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.555946] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.057698] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964197, 'name': PowerOffVM_Task, 'duration_secs': 0.197475} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.059052] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1568.059052] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c7c068-fbc5-4c0e-a685-f1b757f45b2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.077770] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c5a0f4-a9ee-4031-97ef-cbe40af0bf97 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.587933] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Creating Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1568.588408] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9bc75ef9-927e-4ef9-b96e-648c56dc7a7b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.596813] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1568.596813] env[68279]: value = "task-2964198" [ 
1568.596813] env[68279]: _type = "Task" [ 1568.596813] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.604610] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964198, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.107296] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964198, 'name': CreateSnapshot_Task, 'duration_secs': 0.400432} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.107574] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Created Snapshot of the VM instance {{(pid=68279) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1569.108369] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a743d7d-fb07-4344-9330-b0eda9e3e1d1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.626275] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Creating linked-clone VM from snapshot {{(pid=68279) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1569.626614] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8e094c6e-cecb-4a22-81d7-a422139c37b9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.635491] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1569.635491] env[68279]: value = "task-2964199" [ 1569.635491] env[68279]: _type = "Task" [ 1569.635491] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.643145] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964199, 'name': CloneVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.144757] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964199, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.645050] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964199, 'name': CloneVM_Task, 'duration_secs': 0.893832} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.645408] env[68279]: INFO nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Created linked-clone VM from snapshot [ 1570.646017] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91e6ff5-e4ff-4117-bdf6-457b26d87d35 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.653027] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Uploading image 0951a8e9-3711-48fe-baaf-6f7422d21b26 {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1570.676248] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1570.676248] env[68279]: value = "vm-594788" [ 1570.676248] env[68279]: _type = "VirtualMachine" [ 1570.676248] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1570.676524] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-dbff8ebf-fe0a-4d82-856a-5fc0ddf52d8a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.683203] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease: (returnval){ [ 1570.683203] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252937f-d647-1426-19c1-7928566a4bb1" [ 1570.683203] env[68279]: _type = "HttpNfcLease" [ 1570.683203] env[68279]: } obtained for exporting VM: (result){ [ 1570.683203] env[68279]: value = "vm-594788" [ 1570.683203] env[68279]: _type = "VirtualMachine" [ 1570.683203] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1570.683470] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the lease: (returnval){ [ 1570.683470] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252937f-d647-1426-19c1-7928566a4bb1" [ 1570.683470] env[68279]: _type = "HttpNfcLease" [ 1570.683470] env[68279]: } to be ready. 
{{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1570.689545] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1570.689545] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252937f-d647-1426-19c1-7928566a4bb1" [ 1570.689545] env[68279]: _type = "HttpNfcLease" [ 1570.689545] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1571.168709] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.168967] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1571.191659] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1571.191659] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252937f-d647-1426-19c1-7928566a4bb1" [ 1571.191659] env[68279]: _type = "HttpNfcLease" [ 1571.191659] env[68279]: } is ready. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1571.191980] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1571.191980] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]5252937f-d647-1426-19c1-7928566a4bb1" [ 1571.191980] env[68279]: _type = "HttpNfcLease" [ 1571.191980] env[68279]: }. {{(pid=68279) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1571.192601] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4ef153-0e21-443a-9b02-d0d78c1731ae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.199693] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1571.199864] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk for reading. 
{{(pid=68279) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1571.290941] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4f482eb6-539e-4dd3-85a4-6fe24a6e02dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.168815] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.164179] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.167822] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.168495] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.671857] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.672125] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1574.672300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1574.672456] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1574.673403] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116d1957-db60-4512-8996-9d9de6bc2d66 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.683409] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2971c1dc-4624-404d-b129-05671346390f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.697646] env[68279]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f6d3a5-c155-4bde-8dfe-6b15cf920d70 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.704291] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e8d85e-3681-4b5f-8e31-74e480940657 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.733734] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180928MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1574.733883] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.734110] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.758602] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68279) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1575.758881] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1575.758951] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1575.785919] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ec4038-3dd6-4703-9873-90c42209d207 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.793517] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389265c8-ad8d-4028-b5fc-c5eaba1a2e21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.824310] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75de145e-302a-48d7-9ddd-33a8efdea62f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.831834] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dadd64-f0fd-485f-b3f1-468158a8f918 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.845133] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.347989] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1576.853277] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1576.853680] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.119s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1578.668427] env[68279]: DEBUG oslo_vmware.rw_handles [None 
req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1578.669347] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8138aaf0-f76a-4c24-a7ee-8e46afb4731c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.675373] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1578.675547] env[68279]: ERROR oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk due to incomplete transfer. [ 1578.675752] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d3074504-9874-475f-a772-7032badc61d3 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.682282] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d53d1-6fd9-d82a-0357-71bb2f95593c/disk-0.vmdk. {{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1578.682471] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Uploaded image 0951a8e9-3711-48fe-baaf-6f7422d21b26 to the Glance image server {{(pid=68279) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1578.684943] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Destroying the VM {{(pid=68279) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1578.685179] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-be39e0f7-3549-4ac3-a102-df02deb44b65 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.691054] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1578.691054] env[68279]: value = "task-2964201" [ 1578.691054] env[68279]: _type = "Task" [ 1578.691054] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.698125] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964201, 'name': Destroy_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.853193] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.853464] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1579.168673] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1579.200639] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964201, 'name': Destroy_Task, 'duration_secs': 0.310207} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.200874] env[68279]: INFO nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Destroyed the VM [ 1579.201123] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleting Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1579.201364] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-76b01f8f-7280-4afc-94eb-e139b56fcd21 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.207374] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1579.207374] env[68279]: value = "task-2964202" [ 1579.207374] env[68279]: _type = "Task" [ 1579.207374] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.215993] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964202, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.717375] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964202, 'name': RemoveSnapshot_Task, 'duration_secs': 0.372111} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.717740] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleted Snapshot of the VM instance {{(pid=68279) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1579.717839] env[68279]: DEBUG nova.compute.manager [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.718649] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67615bd-61c9-4142-be6c-63aef2774414 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.229926] env[68279]: INFO nova.compute.manager [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Shelve offloading [ 1580.733842] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1580.734172] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73c51d5f-f8d4-41c4-a33b-8fb136f0ea4f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.742135] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1580.742135] env[68279]: value = "task-2964203" [ 1580.742135] env[68279]: _type = "Task" [ 1580.742135] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.750621] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964203, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.252538] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] VM already powered off {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1581.252745] env[68279]: DEBUG nova.compute.manager [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1581.253508] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e5bc7c-7273-40ef-97a7-02bd7260f19d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.258978] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.259149] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1581.259320] env[68279]: DEBUG nova.network.neutron [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.961801] env[68279]: DEBUG nova.network.neutron [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", 
"ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.464666] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1582.678363] env[68279]: DEBUG nova.compute.manager [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-vif-unplugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1582.678579] env[68279]: DEBUG oslo_concurrency.lockutils [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1582.678786] env[68279]: DEBUG oslo_concurrency.lockutils [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1582.678952] env[68279]: DEBUG oslo_concurrency.lockutils [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1582.679192] env[68279]: DEBUG nova.compute.manager [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] No waiting events found dispatching network-vif-unplugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1582.679392] env[68279]: WARNING nova.compute.manager [req-e5ba3485-ee70-4279-a9b7-df7dcc089a6a req-b94ae32a-1971-4c31-b39f-8ed0dfc5dae7 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received unexpected event network-vif-unplugged-120855ec-e8a4-4d63-b680-0a3a546fff36 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1582.810868] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1582.811762] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8785f809-8343-4305-ad05-dca582e4fca9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.819264] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1582.819500] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b1f0f12-7e70-4f52-8176-14123fe3da2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.887636] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1582.887907] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1582.888038] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleting the datastore file [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1582.888335] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b874a2b-46b3-4cbb-b468-40949a80b387 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.895065] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1582.895065] env[68279]: value = "task-2964205" [ 1582.895065] env[68279]: _type = "Task" [ 1582.895065] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.902459] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964205, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.404810] env[68279]: DEBUG oslo_vmware.api [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126525} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.405217] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.405217] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1583.405389] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1583.428889] env[68279]: INFO nova.scheduler.client.report [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted allocations for instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a [ 1583.932919] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1583.933209] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1583.933435] env[68279]: DEBUG nova.objects.instance [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'resources' on Instance uuid ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.436212] env[68279]: DEBUG nova.objects.instance [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'numa_topology' on Instance uuid ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.701646] env[68279]: DEBUG nova.compute.manager [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] 
[instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1584.701811] env[68279]: DEBUG nova.compute.manager [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing instance network info cache due to event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1584.702034] env[68279]: DEBUG oslo_concurrency.lockutils [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.702180] env[68279]: DEBUG oslo_concurrency.lockutils [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1584.702340] env[68279]: DEBUG nova.network.neutron [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1584.939231] env[68279]: DEBUG nova.objects.base [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68279) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1584.964786] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f943c42e-7d32-476c-b147-cf3f30df5609 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.972664] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984f2dbf-9c4d-4c58-b8fb-87fd122e7225 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.001569] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e099e5-1e15-45c3-9b95-c322f880f16a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.008280] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443372e6-01cb-43b7-be95-2a2a03a78a1d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.020890] env[68279]: DEBUG nova.compute.provider_tree [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.166397] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1585.416309] env[68279]: DEBUG nova.network.neutron [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updated VIF entry in instance network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36. {{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1585.416650] env[68279]: DEBUG nova.network.neutron [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap120855ec-e8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.524425] env[68279]: DEBUG nova.scheduler.client.report [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1585.919159] env[68279]: DEBUG oslo_concurrency.lockutils [req-60f67254-fd75-4945-8c59-4ee933b55566 req-4151574c-9cb6-4016-a5d3-5acdaaaca411 service nova] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1586.029300] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=68279) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.539100] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a04241f2-3ee6-4e19-8e33-7325e9bb32c9 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.009s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.540007] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.374s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1586.540194] env[68279]: INFO nova.compute.manager [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Unshelving [ 1587.564050] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1587.564456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1587.564456] env[68279]: DEBUG nova.objects.instance [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'pci_requests' on Instance uuid ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.068756] env[68279]: DEBUG nova.objects.instance [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'numa_topology' on Instance uuid ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.571441] env[68279]: INFO nova.compute.claims [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1589.607490] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacec4e6-b5b7-4f8b-a54b-4fc0e87f9a9c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.614795] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-242326b7-cdf0-4352-af55-7732799b722a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.644119] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca99fa9-ba5a-46ec-a54f-8210689184e8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.650894] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323c75d2-cbc3-415c-9b1e-2e448096013e {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.664597] env[68279]: DEBUG nova.compute.provider_tree [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.167740] env[68279]: DEBUG nova.scheduler.client.report [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1590.672596] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.108s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1590.703609] env[68279]: INFO nova.network.neutron [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating port 120855ec-e8a4-4d63-b680-0a3a546fff36 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1592.090604] env[68279]: DEBUG nova.compute.manager [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1592.090878] env[68279]: DEBUG oslo_concurrency.lockutils [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 service nova] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1592.091185] env[68279]: DEBUG oslo_concurrency.lockutils [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 
service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1592.091389] env[68279]: DEBUG oslo_concurrency.lockutils [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 service nova] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1592.091628] env[68279]: DEBUG nova.compute.manager [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] No waiting events found dispatching network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1592.091740] env[68279]: WARNING nova.compute.manager [req-394b3c3e-8f6e-437b-9e40-6c8a5ff59c61 req-05123e1c-0c5b-483d-b37b-be8208a42c56 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received unexpected event network-vif-plugged-120855ec-e8a4-4d63-b680-0a3a546fff36 for instance with vm_state shelved_offloaded and task_state spawning. [ 1592.173316] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.173521] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1592.173702] env[68279]: DEBUG nova.network.neutron [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Building network info cache for instance {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1592.861635] env[68279]: DEBUG nova.network.neutron [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.364301] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1593.391300] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:41:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='45a2bf363fbd3add5b26fbb528697ce8',container_format='bare',created_at=2025-03-12T08:59:39Z,direct_url=,disk_format='vmdk',id=0951a8e9-3711-48fe-baaf-6f7422d21b26,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-445893839-shelved',owner='7c1ad13d5de94b14ab00b7f003c1851d',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-03-12T08:59:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1593.391538] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1593.391693] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image limits 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1593.391871] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Flavor pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1593.392027] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Image pref 0:0:0 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1593.392180] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68279) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1593.392387] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1593.392543] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1593.392703] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Got 1 possible topologies {{(pid=68279) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1593.392861] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1593.393042] env[68279]: DEBUG nova.virt.hardware [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68279) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1593.393868] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565abfbb-8591-4f82-9a51-24fc3c51273b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.401771] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f09938-5c63-4ed9-b24a-88324b4a7cb0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.414486] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:80:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '120855ec-e8a4-4d63-b680-0a3a546fff36', 'vif_model': 'vmxnet3'}] {{(pid=68279) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.421688] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1593.421915] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Creating VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1593.422111] env[68279]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b69ad80f-3956-41f8-8595-2b06e2f3b710 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.440445] env[68279]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.440445] env[68279]: value = "task-2964206" [ 1593.440445] env[68279]: _type = "Task" [ 1593.440445] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.447944] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964206, 'name': CreateVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.950634] env[68279]: DEBUG oslo_vmware.api [-] Task: {'id': task-2964206, 'name': CreateVM_Task, 'duration_secs': 0.339573} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.950836] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Created VM on the ESX host {{(pid=68279) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1593.951441] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.951613] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1593.952017] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1593.952276] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62b8c0fa-3eb3-4a86-98ed-8945a224da2a {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.956891] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1593.956891] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527f610f-3efc-2343-6b6a-49843f8d6b09" [ 1593.956891] env[68279]: _type = "Task" [ 1593.956891] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.964078] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]527f610f-3efc-2343-6b6a-49843f8d6b09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.115891] env[68279]: DEBUG nova.compute.manager [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1594.116093] env[68279]: DEBUG nova.compute.manager [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing instance network info cache due to event network-changed-120855ec-e8a4-4d63-b680-0a3a546fff36. {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1594.116311] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] Acquiring lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.116453] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] Acquired lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1594.116613] env[68279]: DEBUG nova.network.neutron [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Refreshing network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.466488] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1594.466939] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Processing image 0951a8e9-3711-48fe-baaf-6f7422d21b26 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.466939] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
[ 1594.467112] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1594.467215] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.467455] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f67133c6-078d-473f-90cf-ce0ca64cf548 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.475930] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.476121] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68279) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1594.476813] env[68279]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f1a595d-5cfe-4368-b5a8-9189d2a71472 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.481367] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1594.481367] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529e1d90-ded1-0d19-9a99-ecfc16f0669d" [ 1594.481367] env[68279]: _type = "Task" [ 1594.481367] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.488375] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]529e1d90-ded1-0d19-9a99-ecfc16f0669d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.803888] env[68279]: DEBUG nova.network.neutron [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updated VIF entry in instance network info cache for port 120855ec-e8a4-4d63-b680-0a3a546fff36. 
{{(pid=68279) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1594.804258] env[68279]: DEBUG nova.network.neutron [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [{"id": "120855ec-e8a4-4d63-b680-0a3a546fff36", "address": "fa:16:3e:36:80:e0", "network": {"id": "e70e9a22-4db4-416c-b374-a8878a47f247", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1380535594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1ad13d5de94b14ab00b7f003c1851d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120855ec-e8", "ovs_interfaceid": "120855ec-e8a4-4d63-b680-0a3a546fff36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.991543] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Preparing fetch location {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1594.991761] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Fetch image to [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17/OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17.vmdk {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1594.991930] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Downloading stream optimized image 0951a8e9-3711-48fe-baaf-6f7422d21b26 to [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17/OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17.vmdk on the data store datastore1 as vApp {{(pid=68279) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1594.992113] env[68279]: DEBUG nova.virt.vmwareapi.images [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Downloading image file data 0951a8e9-3711-48fe-baaf-6f7422d21b26 to the ESX as VM named 'OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17' {{(pid=68279) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1595.059007] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1595.059007] env[68279]: value = "resgroup-9" [ 1595.059007] env[68279]: _type = "ResourcePool" [ 1595.059007] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1595.059295] env[68279]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-64d00d55-ca32-496e-94c7-c291a5deec61 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.080019] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease: (returnval){ [ 1595.080019] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1baf4-f30f-6318-9e30-6e2ee2f36dad" [ 1595.080019] env[68279]: _type = "HttpNfcLease" [ 1595.080019] env[68279]: } obtained for vApp import into resource pool (val){ [ 1595.080019] env[68279]: value = "resgroup-9" [ 1595.080019] env[68279]: _type = "ResourcePool" [ 1595.080019] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1595.080461] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the lease: (returnval){ [ 1595.080461] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1baf4-f30f-6318-9e30-6e2ee2f36dad" [ 1595.080461] env[68279]: _type = "HttpNfcLease" [ 1595.080461] env[68279]: } to be ready. {{(pid=68279) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1595.086323] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1595.086323] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1baf4-f30f-6318-9e30-6e2ee2f36dad" [ 1595.086323] env[68279]: _type = "HttpNfcLease" [ 1595.086323] env[68279]: } is initializing. {{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1595.307139] env[68279]: DEBUG oslo_concurrency.lockutils [req-bdfc2218-755e-4196-b662-fec8af306771 req-6f8aba21-3893-4637-8d7f-47d349a50029 service nova] Releasing lock "refresh_cache-ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1595.588820] env[68279]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1595.588820] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1baf4-f30f-6318-9e30-6e2ee2f36dad" [ 1595.588820] env[68279]: _type = "HttpNfcLease" [ 1595.588820] env[68279]: } is ready. 
{{(pid=68279) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1595.589390] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1595.589390] env[68279]: value = "session[52cc6441-92da-e8e0-5b6e-485b3262a0cb]52d1baf4-f30f-6318-9e30-6e2ee2f36dad" [ 1595.589390] env[68279]: _type = "HttpNfcLease" [ 1595.589390] env[68279]: }. {{(pid=68279) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1595.590041] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e724e17-a889-4f64-8de0-c983802c49f7 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.597231] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk from lease info. {{(pid=68279) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1595.597425] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk. {{(pid=68279) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1595.661022] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e170e1f-7658-40bf-823c-e8a535cbfb54 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.781420] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Completed reading data from the image iterator. {{(pid=68279) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1596.781805] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk. 
{{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1596.782642] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feced24e-e8f7-4dd2-961e-cbce6390dcbe {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.789333] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk is in state: ready. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1596.789537] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk. {{(pid=68279) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1596.789778] env[68279]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-550ec7ff-d55f-4875-baa6-30fcd43d0ea0 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.033067] env[68279]: DEBUG oslo_vmware.rw_handles [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d38124-3914-02ba-7902-c531c9696ed6/disk-0.vmdk. 
{{(pid=68279) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1597.033287] env[68279]: INFO nova.virt.vmwareapi.images [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Downloaded image file data 0951a8e9-3711-48fe-baaf-6f7422d21b26 [ 1597.034155] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df19a0f-a011-47c3-961d-2112dcfa19d8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.049721] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2dabfaf1-9bda-404c-8bbe-968ed033affc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.071726] env[68279]: INFO nova.virt.vmwareapi.images [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] The imported VM was unregistered [ 1597.074217] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Caching image {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1597.074455] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Creating directory with path [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1597.074714] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15a9a488-53fa-4f27-b891-38dfdbbc450c {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.096066] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Created directory with path [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26 {{(pid=68279) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1597.096261] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17/OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17.vmdk to [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk. 
{{(pid=68279) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1597.096531] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-76706067-c1a9-4582-bc36-cccf2a3e8550 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.102966] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1597.102966] env[68279]: value = "task-2964209" [ 1597.102966] env[68279]: _type = "Task" [ 1597.102966] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.110235] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.614903] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.116029] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.618713] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.118546] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.617749] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964209, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.28699} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.617749] env[68279]: INFO nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17/OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17.vmdk to [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk. 
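The entries above trace an HttpNfcLease vApp import followed by a MoveVirtualDisk_Task that is polled until it completes. As a rough sketch only, the same datastore-to-datastore move can be driven through the public oslo.vmware session API along these lines; the host, credentials and the source/destination paths below are placeholders, not values taken from this log:

# Sketch, not the exact Nova code path: invoke a VirtualDiskManager task and
# wait for it the same way the "progress is N%" entries above are produced.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vdm = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'MoveVirtualDisk_Task', vdm,
    sourceName='[datastore1] OSTACK_IMG_<uuid>/OSTACK_IMG_<uuid>.vmdk',
    destName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    force=False)
# wait_for_task() polls the task object (the MoveVirtualDisk_Task progress
# lines above) and raises on failure, returning task info on success.
session.wait_for_task(task)
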
[ 1599.618110] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Cleaning up location [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17 {{(pid=68279) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1599.618185] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_da6d456d-7f72-453a-9046-48d2a782ca17 {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1599.618501] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c831b1b-675c-4ffc-b25f-515d712037a4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.625303] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1599.625303] env[68279]: value = "task-2964210" [ 1599.625303] env[68279]: _type = "Task" [ 1599.625303] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.632945] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.135972] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033944} completed successfully. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.136415] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.136415] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk" {{(pid=68279) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1600.136633] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk to [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1600.136880] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-897012f1-ee9e-4258-8b51-7936dcdc81dc {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.142844] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1600.142844] env[68279]: value = "task-2964211" [ 1600.142844] env[68279]: _type = "Task" [ 1600.142844] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.150100] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.656479] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.156882] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.657588] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.158217] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.656863] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964211, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.173619} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.657127] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/0951a8e9-3711-48fe-baaf-6f7422d21b26/0951a8e9-3711-48fe-baaf-6f7422d21b26.vmdk to [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk {{(pid=68279) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1602.657878] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cafc86-b528-4bc9-a492-41ff28158aae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.678929] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1602.679155] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be798654-0f57-440e-afaa-584047074fae {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.697457] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1602.697457] env[68279]: value = "task-2964212" [ 1602.697457] env[68279]: _type = "Task" [ 1602.697457] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.704982] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964212, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.207242] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964212, 'name': ReconfigVM_Task, 'duration_secs': 0.264874} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.207626] env[68279]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Reconfigured VM instance instance-0000007e to attach disk [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a/ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a.vmdk or device None with type streamOptimized {{(pid=68279) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1603.208059] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97e4617a-ba4f-4006-bf90-393a753fb5c2 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.213889] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1603.213889] env[68279]: value = "task-2964213" [ 1603.213889] env[68279]: _type = "Task" [ 1603.213889] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.220905] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964213, 'name': Rename_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.723753] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964213, 'name': Rename_Task, 'duration_secs': 0.141443} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.724039] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powering on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1603.724298] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1eef0b0-ed78-4c99-93be-1255532d21e9 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.731125] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1603.731125] env[68279]: value = "task-2964214" [ 1603.731125] env[68279]: _type = "Task" [ 1603.731125] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.739876] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.241474] env[68279]: DEBUG oslo_vmware.api [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964214, 'name': PowerOnVM_Task, 'duration_secs': 0.418015} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.241850] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powered on the VM {{(pid=68279) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1604.337150] env[68279]: DEBUG nova.compute.manager [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Checking state {{(pid=68279) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.338058] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10310781-2f78-465a-9607-93e8af79e251 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.853456] env[68279]: DEBUG oslo_concurrency.lockutils [None req-a2a8fc64-afd4-4ae3-89ab-95ac094e756d tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.313s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.879118] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1605.879512] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1605.879653] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1605.879859] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1605.880038] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.882271] env[68279]: INFO nova.compute.manager [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Terminating instance [ 1606.386035] env[68279]: DEBUG nova.compute.manager [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Start destroying the instance on the hypervisor. {{(pid=68279) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1606.386238] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Destroying instance {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.387127] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3e8f6c-bf1f-4812-aa29-ed233eb40b90 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.394962] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powering off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1606.395195] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c647cc34-791c-4b4a-81b4-71ceefcbebca {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.400991] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1606.400991] env[68279]: value = "task-2964215" [ 1606.400991] env[68279]: _type = "Task" [ 1606.400991] env[68279]: } to complete. 
{{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.408714] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.911703] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964215, 'name': PowerOffVM_Task, 'duration_secs': 0.206315} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.912158] env[68279]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Powered off the VM {{(pid=68279) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.912158] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Unregistering the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.912382] env[68279]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0d53d47-78be-4d9d-a967-d38ce0bb90c8 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.975195] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Unregistered the VM {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.975420] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleting contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.975543] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleting the datastore file [datastore1] ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.975805] env[68279]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32405578-bb44-4e55-8eb1-44254c365260 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.981559] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for the task: (returnval){ [ 1606.981559] env[68279]: value = "task-2964217" [ 1606.981559] env[68279]: _type = 
"Task" [ 1606.981559] env[68279]: } to complete. {{(pid=68279) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.988964] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.490936] env[68279]: DEBUG oslo_vmware.api [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Task: {'id': task-2964217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138582} completed successfully. {{(pid=68279) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.491220] env[68279]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted the datastore file {{(pid=68279) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.491405] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deleted contents of the VM from datastore datastore1 {{(pid=68279) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.491578] env[68279]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Instance destroyed {{(pid=68279) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.491756] env[68279]: INFO nova.compute.manager [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1607.492052] env[68279]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68279) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1607.492282] env[68279]: DEBUG nova.compute.manager [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Deallocating network for instance {{(pid=68279) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1607.492379] env[68279]: DEBUG nova.network.neutron [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] deallocate_for_instance() {{(pid=68279) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1607.924220] env[68279]: DEBUG nova.compute.manager [req-d20d5bb1-9d27-4eda-beb1-9726260754a5 req-d0c7194e-9fe7-49db-90c9-8d478715e42f service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Received event network-vif-deleted-120855ec-e8a4-4d63-b680-0a3a546fff36 {{(pid=68279) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1607.925749] env[68279]: INFO nova.compute.manager [req-d20d5bb1-9d27-4eda-beb1-9726260754a5 req-d0c7194e-9fe7-49db-90c9-8d478715e42f service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Neutron deleted interface 120855ec-e8a4-4d63-b680-0a3a546fff36; detaching it from the instance and deleting it from the info cache [ 1607.925749] env[68279]: DEBUG nova.network.neutron [req-d20d5bb1-9d27-4eda-beb1-9726260754a5 req-d0c7194e-9fe7-49db-90c9-8d478715e42f service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.402497] env[68279]: DEBUG nova.network.neutron [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Updating instance_info_cache with network_info: [] {{(pid=68279) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.428300] env[68279]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d63f56a-cc3c-4712-862d-cb130202727f {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.438298] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a7784c-b378-410d-a996-7ca98a18cd64 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.464155] env[68279]: DEBUG nova.compute.manager [req-d20d5bb1-9d27-4eda-beb1-9726260754a5 req-d0c7194e-9fe7-49db-90c9-8d478715e42f service nova] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Detach interface failed, port_id=120855ec-e8a4-4d63-b680-0a3a546fff36, reason: Instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a could not be found. {{(pid=68279) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1608.905688] env[68279]: INFO nova.compute.manager [-] [instance: ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a] Took 1.41 seconds to deallocate network for instance. 
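The recurring "Acquiring lock", "acquired ... waited", and "released ... held" messages in this log are emitted by oslo.concurrency's lockutils wrapper around the guarded methods. A minimal sketch of that pattern, with an illustrative function name standing in for the real resource-tracker or compute-manager method:

# Sketch of the locking pattern behind the lockutils log lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Body elided; while this runs, concurrent callers log
    # "Acquiring lock ... waited X.XXXs" and exit logs "released ... held Y.YYYs".
    pass
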
[ 1609.411663] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1609.412078] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1609.412166] env[68279]: DEBUG nova.objects.instance [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lazy-loading 'resources' on Instance uuid ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a {{(pid=68279) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.947759] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a63081-bab9-4a2e-8977-6a4cdc88e5a1 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.955685] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3c1833-78e0-466f-b145-a2e9fdffe879 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.985398] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d501b8db-1a21-4041-8604-589b5af97a2d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.993130] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b42a2e-45b5-461f-9fb2-e965821097b4 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.007826] env[68279]: DEBUG nova.compute.provider_tree [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.511119] env[68279]: DEBUG nova.scheduler.client.report [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Inventory has not changed for provider 40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1611.015857] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 
tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.604s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1611.034922] env[68279]: INFO nova.scheduler.client.report [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Deleted allocations for instance ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a [ 1611.542235] env[68279]: DEBUG oslo_concurrency.lockutils [None req-5d4e6d17-d32a-4f7f-a918-19819de64db2 tempest-ServerActionsTestOtherB-398246712 tempest-ServerActionsTestOtherB-398246712-project-member] Lock "ed5fb391-d8b4-4bb3-8533-fc94b3dd1c5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.663s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.168277] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.168754] env[68279]: DEBUG nova.compute.manager [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68279) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1632.169261] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.163591] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.168240] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.672604] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1634.672874] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1634.673071] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1634.673228] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68279) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1634.674144] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c913f61f-7f11-415d-9c80-061e8ad03b91 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.682889] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba7ad85-c4ff-47f0-b2cd-6410dd168ef6 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.696900] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25404e3-edc2-488a-ba55-5fde8188765b {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.703385] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911a4d33-b5e1-4b95-a30f-83483d43b28d {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.733019] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180929MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68279) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1634.733176] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1634.733432] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1635.753716] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1635.753972] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68279) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1635.769321] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Refreshing inventories for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1635.781930] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] 
Updating ProviderTree inventory for provider 40ba16cf-8244-4715-b8c1-975029462ee4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1635.782125] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Updating inventory in ProviderTree for provider 40ba16cf-8244-4715-b8c1-975029462ee4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1635.792265] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Refreshing aggregate associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, aggregates: None {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1635.810632] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Refreshing trait associations for resource provider 40ba16cf-8244-4715-b8c1-975029462ee4, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=68279) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1635.823533] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee3480d-66bf-4cca-8ff3-778c21082aeb {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.831122] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34eeb49-031a-4ac6-bb87-25da35958a80 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.861851] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a95a83-c00a-4d54-ae3c-7b5847146d52 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.869133] env[68279]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e210cd7-8ebf-401d-a794-fd24d7e34544 {{(pid=68279) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.881670] env[68279]: DEBUG nova.compute.provider_tree [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed in ProviderTree for provider: 40ba16cf-8244-4715-b8c1-975029462ee4 {{(pid=68279) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.384866] env[68279]: DEBUG nova.scheduler.client.report [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Inventory has not changed for provider 
40ba16cf-8244-4715-b8c1-975029462ee4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68279) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1636.890478] env[68279]: DEBUG nova.compute.resource_tracker [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68279) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1636.890946] env[68279]: DEBUG oslo_concurrency.lockutils [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.157s {{(pid=68279) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1637.891620] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.892076] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.168875] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1641.168505] env[68279]: DEBUG oslo_service.periodic_task [None req-2c23ad94-131a-4185-a320-ad57d4053ec2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68279) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
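
For reference, the placement inventory repeated in the report-client entries above can be reproduced as a plain dict; schedulable capacity per resource class is (total - reserved) * allocation_ratio, so the 48 physical VCPUs advertise 192 schedulable units at the 4.0 ratio shown:

# Inventory figures copied from the log entries above; the loop is only an
# illustration of how allocation_ratio scales the reported capacity.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity}")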